var/home/core/zuul-output/
var/home/core/zuul-output/logs/
var/home/core/zuul-output/logs/kubelet.log.gz  (gzip-compressed kubelet log; binary archive contents not recoverable as text)
H ˥CRJ*%) lv> O;~7+S*,-8::\^ݪQ`ƴ I|8鏻QwK;sī"Fk+.` 9j}O[cݩnOnFt^]yW^x}=~p3#ͥ| bw~Q-]p >^OJɅ& &JB4Rb|9&2ƍda`,|8=]&u#UVSDiV3U3z$ȥG_AN. {z J9^P2au]]8LJӳ'o޽MO_wgߞaN~>9;}v`&d&ؘ7aaHzyk^M*9?LS.Y>z.fAs\@~|znMӟt45*%Еw|`eCw%W |sT! 0z@/6=cc6g(Ugn߷poBR,LD~c&)#attAh|:4(s`y6Z8:(?ux&'k=&̉iFQ!0%IŬtI>i`g&WocS -gz~ߗoGs=w`M%L׋[wtgBP/emeVW 7ME`\h?m$FɰZB )`҈@nKb#An&xK5U^K`$VbO#DjE,#JHIM \PN ٪P`B<UX-zg՘IDk1Ml5ͭHJ+ޅZIAĝO!cO0"䩳ya%E(+ ֒cHcbOv[61$5ILX 88H$roDʵݲ-v^|TXFutfW\Z.Y㨖\4LF7Q|2;ou'IY.w99עaKKiVÇ&LSDpqtRi6jXD9Np-jN5jFd@ܢJH1rKMPʫ ,3qmkyX3UZӅIƺP Eօ[Յ3& .gXrߌݴɯn(L?'߸f`jr``zc܃a%QFBtHTX1r,&ۭhd0 3 j-$!ʒh/#vHHLBDn?bam`bnM:ڪeveXs";o `XZbrB`+n=dXL88f`!2 hG]p@%1d#FuUڲ>laiԧQχw&l-XW#51kUc8L<IvjeEBc tƋ i*M*Id`(1%kyHLzz ",iP΁%MK#1s)e;k/^8.k|I:z1bzz1U)hBLR\?{WƑd#}r8c۰󀇗F#䒔m} "%%43==uUuuUAkM6 8\aFd6.‡!mxnq 6Ky}]4kZv~|gϷ*JNpy? FhQXNp5mKn@K  %rp? /1w~ ?+\2߀߶1D?Ir#Yp O(-sR~~~ۻ/I]4mhr^y:Fkm{ILР2) bJ:a35c|42hn{tKA3-}\Z}-ּ8r}_ h(W\ZgDGoApQ0axf`HB y Қ)1H´D7$-,xGI6g$I]UPJ{9͎xЊOW7z|ꩻ -]Y ;zzKgj{ SRŔ)^?oۀ"g \!@o54\`D$@MB,7wR@]˶Yyւ/zmNg-(mW舥s{D:!I\׈\8~z\2WXy.- O}wߟRcbH<̑HTV%^S' 6pCt6]7yH,*V%*bS4|XQ{!5m<~ñSaf,6Ug2៛aJR&`]N&r+4ngۙ՝9d KP8(5qZA9IM<^Ơ&H@STۙvʑiWn4}@\c׵ЅN,@BtKV1Mu k+38& alQp>i ZS\Ȉr4*kc&PfTetۨQ1,%! j`RE>3Z91l Ӹ,q^kWwvo:E鬾\4,WܗgG}w¿4G"JQX3fen 1EB#-|Hsz& .yx כ)I57r!N9 Ơ51J:9KۨD8!A)ЛE{i]f v~j>kO'Ymwb$U(VK S8U )33L`õi,WxAtT'wQǑu2Y)@9.TSnh@RK$ UԻ ;F\4ێgxfMv HxTMJ%٨ pIX Q nQeDxieF:&^D^…GrXQ숧Ǿ1cch0  Ǖqq{9քxrk9h9)TSF#z_?p?|oه]?z>E8\t/ՕtoTl6O̍˟Y$PɊyevWM`a ^9.WXjYQR݋E%EN]rre &W0La\PC7j>N+~_^z8g~՛wy^' KKRS(q/L7(ˏ ﲺ` gƓf =(wAzMq}f8Nt8>޼zۗ?SjEڴ{ޣ+6ЂGRv3Ñ/Ȣ:h seSq5BD2lA¢BH8TdL֋͒9Ԅ4Un5m^`j[lHik ]"@I GNT;eJĘ C,xic° tPiOsAb57$UJI.rRJ>Dt y$EZĥ4K/k!Mxxb~  {8d?BJNrXcb{ &HY!gfGC> ^WsM,c~Vd'_"{QrKa뼇[|˷^y}J DzzNߵzF֜;8|wy/, uc: 7c"51*ZW%5HDsٻPIaKkYWMrY݌l/WޢNe5 ]Wٌ!A7.^avh.uSr]K|˟'fd:ZP/}ԟL(p|AR>Ϯ{ryPtG"~wRތJ1.ܴE_8.!'7gSP3'?^=~i>CY?L&]z؅9TחogUbG{Xy`}VG}w3>YNo߾kXhofTTQJ=H?ó¥sՏEYC}vPLjRQ@(5 vM ɇdz^ZV ~^J٫0%3Yz]谯%_-w F@˪B{̂{ϿiIYumٳAR.\Ӑt3^.6'ps)ٯ1JYUgڡ0?WGD@YuŤR}IGgj@6 YoZTTJs={v]PKGm`Pm2Lpu34PSUlѦTMZNͭ|l=_%_tb/\b;nUoʽ3O{H~CL@?2&xDr1`ڡ`΋e{T2*^iIUVYfAjL$9)*S7&kߜ[cY8hݙjB4#XK95UR2$qFbăȌ^hfNJpi⿇ geguW3y)Y߀. &.y6˟7CܸuP9I&pNnx0<+Ԏ pfFï:T=4WP_ I5` {ЪEmyyҰ U6Y~po&̂jwd~a .ޠ~AQlTNƃII/R8Q|({iE_l2l<?/J?g(7%p:uk2h,"4 s&ύ. B$Μؙ;s⎚ $Zc(Fk_$2!XP(8%U;:!MA(Pʍ"V!Ew9lVJVV¿D  ڜ _`GR*7(ڠs{ӓ/6|0 2UӈH#ւ ]|ԛL祴LɗLe%Gr_[Kqn=Zi םər#X2!a޳4%@-w X fRa#Dt",ܨ!c‭iU"bK(F[@!% 3Bs46,g6g/?rz:ﹸxvOM[TNt=Z 10 $fJDڲ;8 %0m]%= Vm+@o8'DutJID s[CW .Em]R ]if-+j ]%5| ye0T;+f׳=(V0HWnۡd;&][]P5tUB+ŮUBTGWOFUUk bBW -%NW %c]=A ("Ui[*t*T'HWLs%Y k*[CW .Smv7RᎮ ]q[+Φ~wn&fי y6{P !ړ`8ܤ·Ni+5q6ymc U6:*:: (8C)<¹▱?GW>9m`0|a)\bA(<ԝYz TՄ}ziߗ?>V0chkc◛aLn4d5 "`ι`1X>qKž^7b0b0̣!gZ\asŀ ۬;EG@Y/X:bҹVpbSFI[^fq|*{ɱz5Qs-9˝R 3|G+%µƢ=!l`ZmߛURN~*Ɣm\Be[ Rt(]=ARH -[DWx7`*-tʝwI(UJ#i f5e;ߚP]5!L8'NW,C.~CKhPsᷠ+]c%o]a c\BW ĻNW "]=A"eZvȱ^w$K@FЛ^Lm=i TaCpYQT*<R܊ @ӕQ)'t銔 &"~>kj hTQ` ]_ %c+(#Jg෣b0}N8YT ޻$3Bn9"?|}C:>ޔ;_7~ޞ]Vjиt.d6,Q*e~D8 EͿ.?y`|~p93y u؝q#&׏?'ǖ JqLeojv9#tq+:W!cBvwL^wiڜMG'}ətQЗ-쁛1A{T?X.[r(`1\P&:4R5fх:""5֧)L'|j^Ucױ\gkkZ}s'Wjeߺ5'(=3dRWY-Z3cvK&UZ0Q37mЌ:â^u9EJfqhѵb$~h ͇O bwkK͆vl乚n kKJ66ͩ)cĜDh=vX Bu؍q mq}Ckcͅj!SRW{OpGDk3G6wZiM1^ڣa'?,'TD2; 0; xB. Ye)ރ%v;٢0FGxt! 
A uQE~$iHg8eU!m%=:fDyKƜ 9s,O/Ss.ޟ7g!1TRc{jH%%VbP9\:'t >]oaI>,QێMѵ5b'ԈShі8:AOkc0)a@zQimDiivIVCJ 6{KFC&flڸ\=D@,X'4- BU)>drM8ŻNƢfnJ>{(R.Y hjA 1Ȏ ўF }m.5뎼0Q#_fHHs`eL$XEAੱŁ@EE{0O4:685E @rܪCZ]]9ɠ-.ϤKgǚژ[]Qt%ρ ɺ( Ņĕftc]=^џ^ *2`HJƬC6笆sBE(O_;wMBAqUSO9.H1$؎~okJ R5R c  ʄWg` QAq)t iSPNJXvl&!̂ - %juefH57]\?,1u6 DaKc[PED&TD;k׹;f[*:#χPɨ[s6 : s<`&b!0hӀ]ƾ8*)ԙ*Q̓\6ud&];F*AC5lD]`#)#͂EUPhϲ (Ez@jA!N9 쮺r{f!EUDI)bu+եa0r yk7!!ѿ99OAYh8(, 6A ˒q1!+aMh#ǻ =sA །wA9/whz1#.UUq1oT U86ʐvNZ'D̿h6`8]u&ޘm;.O<#e-x}66Z01^vumz`-$ >:%@uPyPmJ_6#+E$W{HB+uT2: !'y֣EΈ A9AJ$rA6iJ,CC6fd8=hY{'&A-H#38hGm`QE,TG7>/XżmC5YKae0D!v*"}w?. O!z*ce1-V XB;KSv <ŢuԆHTʗv4:8Lv51E-`E%P2vڱ", "+P(vmcY`ň޲վgyF0΀`ȸ)YCq.:jUàe¦f@ 2\H힟fhJ7a#U2'EkO^ғ8 8XkCW$ЭxB< \T*΃6TS.ʝwKC1ˡ옕CH28د(P@7-lNОm=5lvkrM[;0/>dOpU jkr٨},s[h{N, @Ѧ'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N q:~UG( +: h=?{''ЋtŠ8H@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 b@A:9h=goZ@@ Yܝ@@' t!93 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@/ d__p_hyN C8^h< '8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N q'.\Zd,5-o7/Z=XwP6OO00id\qb=%5j5%ez%b\zƥ9X)p)ZjqJˡ+"^ ]nP9 r%j^]9KiEt)Z],Zzr틤+o]\զ;j+o@yvCWaZ]%īWsju@ !kz25+i-t5g ( ]yskJ[GT.ba(}^t@WNCZ]Ōʀjh}xt5PFzte NxMjehzt5PzztEI+jjuQW@*@ +j`چRNVO+$#K+kuHx;ݰ7v1x>^~lNO6T|s_#/ߚ.l 636No{1f(\0 ֤yHOʘv_޳y{W>BaญKze{㱫4S9N o9[x57(=ٜoyZd~cgOg)"wnׇ`櫦|h34m drg}0>2۶>hSȄI!'g.9R0gZt83`gaL&6,Hl8HMV{oZ2)TK6;dT׭STݪzDע28 k d:o Zy+_jEܰEQ|vgtrՕDDٶ5(cY kBFv-'t(k4tu8t;-.M`UY{AxՊV3tp ]!Zb@G|MtXV{=]m)o+,nXoAW]u Lu]+DQΏ+DUOWHWq;+lCWW{rvBmۯg+NM F;CWwƻB´emKOWCWrтe,) ]!\fBWV޻Btut%)*]D3brvDeOWHWQт5[{ߢc;3thm TtutRu3tp ]!ZNWb ]Z5Rw '~ P=prD Ttnwdm+D@aL;[j;2lײ= Z CW[e f 2=]ZT0ڂw]+@ t( :EWXBv퓟xB8^v_3ۡ4wut%$Dvwp#"t( mpF &iH'|9'ZlUlk٦qНMV j>[YG[l٪:@pTJuHbkJb#\"m؈R^bVҊlzl.5]+D+Z?ߊ(e?"xt)Ct ++th|/p(e] }޸'߯t({:L%1uw %+GDsͶ`]ĶkɞoCMCZv`݂lOWV= :5+@m;]J{:Dbh;DWt.]+D+H QJ7k@ µ+th%k=]!J{:@[%1(3tpUg 5>4*LZ@uegGz1BdžhuJF嬵'U1"'41%I&Ɉ`#L,zP 3owy5@ijP+y©ӫ޿f8w'ՕaY7:Fj΀6W-r*6 Nbe?Mޡj\^!C7$Un^d]8M||χk %AI̞H3^GEM[%Yri8?+fYab8k{[W:Zλ*T7YlˣfЁ21]O´G U:&ՉKJhiM \VyYQ 3"hZyT`^\YC麙C|6åx}-}YE*ul%4oh^:Mfar>UW`4 4!.Q5ţ+g<Exi\~j`Fua'U/V.1-^ͯA+ZjqaSW)/&O|m[7?]er|ွl*J]xf;*|8Ëc6 b_ 1:3`GAl`4RY3߬VܻCq{x}z oF n0 }`7)vt^t _p`׮rCF<9>LX-1N/()1vf|9@{>|Ğ/ NY!f%[@LtPª J*yϸ ʂ>aDПC:5팞F3zPRz=}zRQoJ.7;XS07*`,lhւzeM-Db0ZS)+5JlJc4(Riw>0q"vB$w@N& n6w-gbNb iySJa^0cX,d4.1i7$Cc=T4}N) noM)zAyt)ehuYJ{6=ùT/;){R [B|!TWioٵ^\)j;Uxqteź)b > H=Q>^fU)lԲ#eիnMxfp<cC1*ids <,yVsk3Q%VQN,e.I&j&Ae*OlCp]R-& Ixh$qɍTyĄjcpo7Q Tt~8v"Ll =+$F {%~1i5a&)gp ު̓Y&8aY.ݓ*Iye3[siZ]v%a 3:ڇt k,[-22:Gx'8ASԓ F꘭GW*)*T ( M3ccpg|Ƹ1ؕ u\z.|R.\!²3>^iq(pvwer_$(h44󯜱HM)htfwLFVڹufteLhAY*M8(csóπbEF3MtPÛ5Qgv̄MªR&A183cMb]Y4ڦgރC#q <?BG'ie% )| ,Z#07$Y̩1 .傩LEraD2 {֍y` ̻PLgϗmz|L xӏc迏Nܼfg"O4)k< c^:.瘣gڄ" JSp EN,S BB +A0KМUZ)OR80nčyaK+h:j>en螥j>t} ֛-vݓveZd{ܨWtkR Cx9si-l%S7; gt.3O>*9Z'-/mZl:#c U`(DF:#_*kW|mv =t|>Qf IRErBȵ,sMdMc\YYCMqEbͦdo#w}%#*T"$X= &tË﫴ο͞`v^c;p }IJEpcѥɸ`f:i*2d hvYzup~f3s&霴 弗4- f]cr2JA 2P2\?3яYlxe՛wow ,Oi4?мӧT8 ϸsAk8|$zBLH"sF35ڂ2{URu%.IVg>QM`&'GfL4Hs ^g} r\]Za3^CErMɆAO]$PK9vaT8#-ئzpaF_N8#*=dUGVw=9hCRs CyCCr&QpM sW^$o}濫/{p<9xH |qs;x$› S` 2yy^<`S"8%sHIMöfOM_UW% ["ZXXtRi'b=b)XPP-5uپY3gojua$lP̺v/v`j[ّօO.t o\S!(cF4r2:,| 1g1=(mLV89\#Y],Ȅz rh^q#_WEko4998}s(ͯ/9:+W] :/j"pxZSy~G_~:O/߼)F/jFo126²;߽p60+ާ0ge+(8xAtF sSş_VQ`=Լ~S1 ؋ohb¼7]˼k.jeިevk?OF¥Fcͫ@P_b%=>>,i'CqjLٖkUYGlR}­p5XIa*`ahB%jSi1*Sum0-e.,!Ӑf~4˅oLpa6e>+Az(Q_g Uҝ2XW](>Q~KvnF? oo3"di*\LG17\eު]oPKf8&1jIxviքjbZnL_ld[ooXE~n`0.2K54ŘAkdR75֬GW.ùmX&? 
לh $-r吾4ko:ޑxdU3PV#g0ŏ,Bm.6 Qm.m -u.y7mIT1i(4P]jVe&o]cZj*qN2rÃ@[n5 $Dy8;EGCV`M15Pʴc$$AßQzixaZ:v#bWcfyf!.y]mRI{҇'"}PS}UobxqD^Z`Gjfv:: J߾*1չM(/^1&IVg=Խ+2̭b4uEDf`QH`3}npTB$݉{wޝDJI#B   ǴJ`9\' 閡r 8M(rneyaYqGkT~b%#RHTSwNLDF$o )=<=8[Ï` ð]{i,b HM!fqOtrYMgӋp2@]a9_[Kqn=Zi {Ǚ{8,P Y@`)c7EK}i9 Drjp8SGC‭iU"bK(F[{9A ;ƙs&R/rz6n,/޼}t`걡t|vrU6 fWNExuv^Wxz^ c1js<7p!O($pNd@F$z)ls') A@M`ɉFX\bgNH b(ñ1Pmvegb䐤XHMLc1>$2D"'paw+q:`Ž@XNi]xi ^IO<ox,Gr 9ٌe) c,qHBǴX$7)(ſ^dx4fGFzEMGGY1ayxRr?tϏk"U+VE&òVB<;/y8ʞ1|\Wqsѻ4g`2|']]?S.ƅ ?enru d fiK'C}+ke'͋f PTd=&HV^YKc $!q*Ϋ5CeMD64O&q6ʹYRԩ09c-g"/A*lo\gZ~ .vm aF?#ZoP_e=@%؉0! (™\s k]XX@h]` Π5Z% PyÓt s"X$* yN%`~`zVW"h4WS~ "mRpڣ 6; >{?k24ﳢfˬ8& f2Z(Z,=JL p;-yuӼ6asʄ> 4OP0R%uʨ2^aZ Sl1)IUjeƭG3ReK#~H)ttI^Zqm]1C;nk 7˾bӭp֎2&((N`I*"Vt6*1̙%Pw&Rb&yXJ;f+K)PDBqy1LJpv w>S>豋E vP$Fe16ڐajL0BFV2ͷX #. ]jN8. åh2DUD&#EDM7{,] [Ry^#/. !JD!GHLy w A",p"74Rc;4&`\\w [uh+mnFo$>F"Ø7Ni9W/\YRܰ9?4<0s0T V0bf.QSw|P{Y1_ l]+ jU\(hʄ,4ͩR*JG(R9WS@锃R;ĬW逰1l'g)Oc.*5XMn{:1_+ 9Z}y9v_^5Ufǎ)£+î ڶ8#.|ߜ aȷܝkssc̱|:IaHE>ג)E>i ߃#ey1 ˵ٟX C_X V@xw N$-'p6m7SƜ`"ˆ2?5<(;ʥ1e$e򷡑K=]o{x:l&ٯV]g˫DWi%oI烎[gV;\WUGmR ڸ!A*m<i IA &u,9mΌ6>6Dcr u@jªk(k+ ξ"$_ 5+1Z:u=wHpaZ>Tf)ik #b +M\<rx+<(sgb@1  !|0B؅|y*Uk\Zaňm(F,B0#Bbj>HԔ1тFQ4`pHºB󦾝=-X(|hk1Te<)|WƱZʄ2xM>jb w MX5u[]5W>w|=ynSm{x\Aj9X+ISTI'trGJTjLD䊷F1ljQ6,alDHyyoU"iL Z ;: qQxiG'keF3V4(^8*T3Ĵ32FXsb$NP:yO̸?\HwD@4:&tPafXTЉV2э Lj |&$ CQԓFßXF, zLZ*"M׈ZauHtDD OPYˡ)%j?d鴘7GC23hZ8%,* +9ERKs< D~"u[egKc/Ro/2DKr \r%Ifr=0 %E}`ָT0`eNVWizu^f̻epv]h1jO {Gm2ۥYsl+NȺ\[:] oе,!̽g9iorõb+[rs\g>~ q#a3o}>?}Z 5;7epScozt{c{~{߿>~>|_o߿{$\N-,S5˛޴ilo4MSO;v]>vWY ơ(cϿ{tO:-fvcAy0 j~jg;W-0`*Q~mb3 ǸGzo6q.&z[_c 8vCWf/pT?ǗSS\ʙE 4/_Q1?b94NlOJ1jHbrhB`yā.y#:Ā \Rc`Vvp G UUƧ_LKx_ؠC7 @a( 0P @IH.  P @a( 0P @a( 0P @a( 0~ aҜcوlX6b,1Fe#ƲcوlX6b sfM4x\vkMP8r_8tX jy}ڨړ羥(ˆF<1('Q w>!g(;]4-y8Nz9` YuuZ]q\q1rrD%)G B ',RH>I.Ni6ZES% -Fp 47dC}(@rToIpDkDj[-[KJ@ᧈ+Mܛ'd9jD.l68/cIAPK $"x.#Du:gs&q^pqS#1))TTsZ`{H+p!I *lI"lR(}#+*ӧiVEָ`˗<]k}(Jpn-s7R!VN!zgHZfgHZQZD%[HZVa>c>z:4⨃ؘ:k<&SJHP 5@G*R휠Q`xlJ*#SԒT ML\YU J"Z7'zB\aknիzj޴=)ٽ*?O`T 3T 4gpPH!J-X -'N5* Kmdhښ"uI{)ZYTX;v ^pQ"~PMI}f!,=B ~ִPߵ0Q8]8"Eu<S;L@CTT|Hjl7->6.qniEm\̈́V()5έ9x)Qыh2l5F+_d2 ejq~8bI?4VŪ?̳@.㠚uyҼ9!Ćc~;}ݯ'u}7 5!Y0>Wdȶp}"9 ѳ{:gHF|r@!ۡrKsmA;=ԓbrS?jZhaC`\.UIsB#@ 5NRy+#HRw;!N+v%[;Tc4xEc40jI5$> B$ :]yT1HvCsr)$Q"Ф69k5I@vؙ8wher[11|YF|m&T1 ̻ătX93]Pc$ePQdfN5r mD HG4ςO!ZGPI0T:RD3qnWxs3v豞Ψ4;'QicQi>$~rvP^jF[fu19i2ܒ-nSv6^{`wjod)DA 0pvtb1Y'j^0o /KF0S(Jw$xKCbDsPDN[YJߵeL-cwX3[m,Qm\lהz'e7_4AfuԶ^{7n9`:9UڔT~\B2) ɃGcm)-"iN=qF4qΉ7o䭿T}(8&M␲qyͣ*IJ|Np"v&*#jvq\Ok:;%b)vśFa ?G1)Z#! NK+sї%b>b.n=ltljYpAKWk|9fu5p'\؆Ip%}KM%A#wh"&+j뤯bDKcI(4 [J|e KDOh4AjEsޣ<\D/3uuczfl $ TR$NeIr.<O5ZL%akVĹ;Xo(!C5!TY''_NKNmuQ 3AL#4_Qo<9lѦŗ/'LmAd/cvц&hw )8̂3gzK Gb95ɥ"9'BB5'#TEPَhٞV; Tx㩎F&ꀉ` pphiFAmWa.rr:繺|*0'[dtz@'W@LσWtWu/F;Xsn c)UJ@99xƉOjyWl3ÿm yɲxwwcy4<`m 0$ᡃ`"ĂW:%Zm.eJ$lCMS76H SqC}$Wͥ0]Hv%n,FRLmep2,C۩8ˠ'T%w/[OS5d+vZ㪎_qbW{ǙU=y?Unb?ƣ٤zUQ„תYa58h<{য়;ս~^Wc/KW}_f[\Z"M>qBw~3|su7ؿa4iK' #[@U~MOqH3;KƕTUVVg;q┏ CV{+IzB5脑q5LF'b^- /M&C|̀K~BpP3~~e^kp/WwFϻaZ}gt0d,lK0l5wt~h񆑊gy>h2$!Hh;S׿G`~zzn!GMtIìco8zqGYRDMPqn~P EN%b&C<^_~ޯzNZ2?9o$)3NEw? M0|DQB_>h_C5[j||>iFM]CךbT^kJcgjGxA能~Ag43+{=|o{8/%G_+NJn`nad*) ]^еg"^2.mv^0>V{aG\ÚC"(~7ѬuI(No}Z-95qR$j`(#ui+W (":Â;SMLhJQ# 'o|k9m1l_/<1\$m%HA}$je9'{M7+ؾdM*tJyMfT-=?Ks+D2E3fen+C pUFƺ[0;nwXw4Hz-#}捓)B t m41TJ%|d1!) J5\e-TGcDz `>uSd! (_A*q_[t_BtǍZH0:IUQ2 2ƜF f gR{-"l\.hNh8s# pzNqrCZ"aޅfE,YydcOPڕ2"l2y,xGI4 ״|cdmzwg鈧y;[TT݂ŞAty5i/4yA{z:4OF'x2VRxieF:&^_"JĠpᑱx2-4<̷гzZyh Fq2P x5ǚ)k"1FX~@ss乍 Q-*T)-*]y(ǖd㎩G(zj.4VIhS'˟V)>ԷJt8tr2Y;iJz~8(7ߚ#MͅhSXaGoG}w>)y{8vf)+J_{\n]PL\0Jh! 9Fh]<vj>쏋Ӷ?/_x?/N8*>z ;Ä:Ö_;\ғ[{_IQ34ٓ'č'β_Of3~6pg05q7xի޾|~Ҥ5Սι=Ѓ:l? 
;~rdgew[|dE=-7AyLp4VzJ$A` Nf2"mCDG#Jejd,͂5Ԅ4UDH3kz{cWS߀Ȇ&@<-|IJrT?pG1pDQ  =altH(#MԜ3Mp2fڽ; v^K*c5r2b^b`z`0qÈ)洧n# 2pr3ݗGǟ?kG ?<=:āx_.7_za8tHMhzW7ZWAx/V`ճNtT~ߒΛ1ygEdu]}~;ɮHyw ??Pn}}P0%:UI,!VjyoA?fo:{?=u?1)`,9w~1a[AQGeʑOm,R_1U\9zVtԏ<#^IJ㌞v?f|Ȟ]tDbd8u^=+ Kjxi>!N tH=bz=^\ g@NxU}G_/(+\ #bF5!UL\Ouƅgh Rn!>bdoԯkbtzզn$՜OM2 AܧJ+%K%s ٸHKc#kԘiΡ9=lu]ڼX3Zb;"I(e|~@0|K:fٖIYK%ULյChEl ;vF\2IUMrAUVOI&AjL>**3U&k-PlW,iv#dFOD4vo깚HkOY=e_{&RN5[DB IP=ٕɽ`y.]GVh 3,l` G<ۏt_nx"Q(t A!Hɔ(:kaR#I ՚5zEx¸ۍ`j8QB(q ꔳJ8Ez0in^22%2C>% Z'g07bVl˺wi&,gJ.~I}LS-i՜!@SA|uEŕB65>ٲLןLͺuu/>Z<H6C.UNf5<Γ$H6o'fF_7/ xN/8js9B6!Ww~l;tp(ŘHmp6HFY@SJxP>B}};Πf ru4Sf̆" |(rz%|(lmW/_^KoND܇vR_Lg|I9_Ԙ_]Ujy/݆M&dB_@-a+mE{߃Huٚ`c9j3VzrԵiUͷZ)k|ڵB5l[I~$-xk$Ig0N3l.KMx s X(XX[e66͇62ظe[H,5E0jS4\4T39)aZTP1G Eέ,k2,"sGkZ U)hWQh**6ii+wߝoҼ4tmcJ fs=lE{}m_z#2土8Wa1F1sɭ`)$mݍ qWRk)Hܙ0dM`M[J{`}w\<&`=yů?F V}sx%(*AB⠽:,N 1 l11*A0bO[ϽN(y"+I@=oO(Yys36M{&Gfa+|XSsP}eP@QJ4r2>=,$> *'<4 9bz6l݈;{}_Q͸8˾0Ad£"/C3:Kqob<.~ArDHO-5hhA=Ygme!b%y@iޛmymi׶/xs"6 4T <[@fRkMHYfR",1hMR*7uLs56D8!A)٨#aR/1",DDꥦa$EV 1#gCrP~+eJyhn/E%CDqg-͂`UD{L !тÙT&$__ZD "\M]c#qDG4.’B+O5$DPA ͊ XH>še ^JJ8,NJ+"29 Q 51ry`mzw'ICy;[#Q4Uw}mAtuV94yA{z0vǹcڨ(*^h6DNW81%\xd,b <O8>-,yFQ`09 +-3kͱ&ģgJx8_ ~P}ZI8.^"}\S$goKMCșꯪp-G#I _@--:MI[-*T̓4e*_y(ǖ(T#=5P"E* ?dPo޽G>,gg''͡ʺv},?ԍ+w% ɆYqU2kS2<iL|w,M6兵 {m?sv3vm'>յ-׬,${%n[S4%M::ku(.PӡXԡJUj>,__<sGUf^ky7Q.->~}439FP5wIQ34mٓ'YS0(Y=qKӃr f/Y1q(=kp?^}Nj_:RLbs%WApE<4|Y&(ϝS N[D"D06߯ӳhS2k:$lRW/uvݮwW% #:p]貈-ؿq3~T򦉖7 =Ⱦ\I'=_eWEkg0:=aMQϲUWΫݨ/Rq~F?qqXwɢޫ=?/^+Aj}p$mn Xekq'e'ꦟRW+R?{ӚIv.ƃΛ^}]^^w RIq 'ii7fp v]l v=0lH]ccb9SJ[ݷ:KQR xY${ʿ}ĝrc逤ѣP.YRYlU4Z*; -Hp#NnUM\_x{0q{]ENjU|ur+rWԵ"uEu"[Ekk į v]+\'|u/rY+B65Y]?A,gYw^ZM8 X5:`NëoQflŜX?Xy)7\vS|/WT';[W"mШMlenI0u?nu.x*~7~ T޼[A[%&g*J)3nv"vڍE~mk<==x3{+\}H屫o/<RSMciI Dt0ު*5߲ ԲSk (ⴓ{$(89|\zbjiHZgcR t4/¸k- ZX¢ke^Z567']mqჷUW فnBWc+܉aKR~2~M+;C`/y=g>{ϙS( RJ=#8c{p=שbι.3'C؄%,N9S$`W1Cֶ( DUX-zg՘ `Le4zl5B^Rȶ5uv䫨l7% |t1tס:lF /OcThښPnhWQ2\зLfGERf_Y[KJnTJ/NCXI5࿋UЦI&֟JUgzv|X[M{Ip乒h0 9}fn;A?no ݳE7lП tS'b[l)Y4jwzשH'{uH7u}(]T;YD0@ >7V\H*!\??.* %\و$anM" BPpLKvqB 2r 8M(rne̚aYqGkD j,LjrcϝO֌?|:;ЬKl|!=ALܦomA+>WQMgO>}hg*C( 8֓ƑpǙ{Ǚg:%j= HSBrPP`-9BHD,2ɩL d*sVA-m= CJf/hlg.,]l؀Y`?9tĨ nZpPu<<;wU 6 fW 2F6l .kkֻ7hnjq8'F8y@F]K}εp*3&'iOe5T^1gZuw\x^"*gf nIV: *{˃L#ܥp?mYhq;2)ʸ.R's do߇T֜N3~i +tBQ "pAC8RO y ojp^Z?B\R"1|Speɸl5ϱu榷X\S9CI6G G_e}w@$u8!D:O[5G|00vrLe_d#3.U/Yt誳-⇔gSc?Zz(3p-qno!0eѾa(cKA*`"`eH1zT "Gox3Pmp6]LLRnUޝ.7Ӗ`\άLK:Ӳ㴩DI(hV /~V L猏>l9G)98K*i-]έuieGUjy̳)K[C{`\PyQIU3znA󢚳ٰ<Q! 
qVLFeh [CathAk.`40|f%zpjy2c@Εe.-sp9BbR9c6([8S{wEbu]Y{zkKͮ68{r$B:]sڇ*&?Hdr^G_8L0ǣa Lyg6$Qs-9˝R&=H HZN􀮏Ŧ(λ٤_]us8RĪ8YOj|[@Xl}]7*\0?%Wƃ탤^AEګK pjPTњ;柼&mV0clsfȵ1O&zG{mਕ iz =ڣ(ksۃ%덾25?=!--b?S +rӃz ]9VD)  KrYgRw5z>O|bQLy``S.KҴZX* ;R.[4'76>xTܿ)Ճ8mz[O5蓮)iG<r@q*Wygb@1jÞ[ !ӷBFj+U#k\> >]n$zaR/1",$냉KM-a$EVGH_9BnЬB-*R궴9jqׂd<-|ǷюVւ3hDD&p TQ͍QmQ'(F;^mI[gkwOMvܓ C)DH99X)s$S'Odx޵#"eg6Y+@9g]Ù-y,;غX%Զ խf]]M8THL'+>؋]ϙ d[5SD$DO^r PTB RA*S 4&˾VVN_9A\{Qr'Ѡ$uY&.c),L@lSe EiTCzW) Fѱ'{'ҍd=BfB' Cq't0Oʐ7ľK4XAZ~k@DNBJɓOv&`+WQͣӲNK#zd }ZLlZ1ȓΒaG-GP US1,ސ:6Ы2 k-J)HRցߟ2+"PB!v5C1ҫ[uD{;ZEEh%"r/-H.,Wt ,4m] o.\2/]<#8L.)VUJzzm"{͚U"9,QEi5M?jDfZM>vaw,ysaǾ&]5G>MLnˌk\X~EZ{ dl]BQq];;-Ī]ْ6[:Y nfbQQ ͣ:GU:VWؗ4g6R6>=:\ܑdC<ԲNg :߿|i?T~?}ؗtfm@{skk@J}z0h<Bvn:]ΚrgWXl6?;"NqT8!tOb[cu JJLOxŁ8BEWjT0ʠlKThJϽ0ü56qr8z;{Ǝ)I֕ :uM@ Q+8&th[N6vtM]:ņtbInycvNC+>ر/;='žW9L/äI`p]͙|L(xg<j'{Ar)YC,uit)` D 3+ޏtij |MIP&&oaJZ{YJ:eRE%X!"Tyxv4}DlWIW4ubm}S9WY7Wo t1 zF c9ؓ*9 ~Iݦ{R${ =1I"4QpǦ3ָ_]2XrMbqBZ V .$bEP%2Q@,3GQdRHm H(D3ʉ``tSs9[S SR}$HH/dMĹfݭ)v \ǟ>TL"42T!d**V+E^Id$gS7=I $ٗҲg#O){2fHT=LJKL ŮlJ 54_E& }{^MّLO#9?85}k3V0^{}SձNAXX6;T#x0tw"%d YC]TV CLxf;DQSdKƃtֹT]tSP]Js*/ ]#cg܍*aagX,ĎXX8zƻE\lw-qrC㏏G_9bk50c&q`ޗǕ1UlKT?+vhB *ld.@ vQMY-1OWܕ8w#v B)ݙv<]Ǩz=[HuFL*ZP[tѝ3"Z[?"Ÿ-};#d (:5)[8Z&(|H$9lag܍mP_KmΎR1Y]c:FDG4>fe)z}V2@Χ ,ɠ=[O6ݒhUaQslVJ-MT )DV.F(u#bg܍_38nƵIkicpn@X'z\q6'ePU|.J0:gs<"I"Ά,\4HkD>fSagX'y/*čr# O~dm'γGB2'˔_5Gj2N$BzKHΑ 6QAƘ>&BnyIvC!+K^`R1JXQ ِI>Z3VRXCd׊^Cvd8]yb'h}騒.p"<6Y-ζLJ4) $P ETRtXmE-e> 0n7%2bTp@i$@mxhٓ0Hs ; >v}ngmskJ4cֱSU!WoTvyCB3ۏo>}nWc6p_Ǯ/Z8eݥ63tb4{kq:[jX)pC%<*}n=Ap$zZ)Dƿ˂6!y*ct&EmkvP`z1lpf/Վ'Ͼ]29zBR#%yDA)K:A2`ʊ9f&ۉNdEeڛvUdEx` >ɨtŋro^(Q=3zY $ "ǔ3*'v-8 9:ccԂ#IS l&)->+]&TBd/LʳQ(oL-x=lʍFe9k[k_mɞcI긢,_(4nE/ L+q[(zj)z,{WFыEpV=p>9GwwpҪ և+R/L=\rր[W,0)5pUŵ[sBԦÕ<k+XE-*0nwU}`-,%껁+竇˯]=H`aZp0iyzJ,LW%ZEp҂R[WU\mVm:\U)tWरb6/3 qK•B)蠾vhLyS6j[w#{wWՁϊCL|J9Rjhxo \:gͽY)m|Yhˏ/ae7Ӿ{496mX`2bk*.¶8@{#;@UJz5:@lLD2k5;.Wm&nE&GGQVBC*:DRHnuE_r \5ySs,?Ev:Օ];΃Q;?  m4Yjբ"r/ZU.,3 ~(j&_o0>1L2.uԊe)?y\t 2cRH%`TUJ|Ov15kk֬)HgA6]^..*7}9m`jQO~2) zS68-LGGZ$zvtwFjMNT$;^wq/55Yz:+jƏ+}=^ vɕ /-:g7.=㣃*ۅ"Vȇ{~8<|XՒ+[fyK'͐\̢|`CLy8j1GbGo_]o7WEomCpK&xhS`[Rr7ܕ/ȴ$$"$gq83|uۭkkds˗쫵]3֫Z".e$W,p8&ųWrl؇jŴrV> 8'~ ەMp:o^o߿7}w@oUg{`=. }MM> X8inـUW-U-fߡj%z+|z׈f- co>~??/s%q74wKaadc.6'Ɯe|Q'/l1x|U7>ݵa!݆ht !Y]UeEA9{xTqTiը?tLax"Ih plq0i(U.~.Nb/}F'v4BX>b.D/t؄aK`sK *ECd!@aa"byVx&̧Ϡ(`W25y" è0S5xmNG\XV)SQQ'y 5 ɚv4Ye·|-|-E]FEm4qD9 yqK>$k5RMt&ּXB\x$Yl -:aflJ!GIrI<hp'̐HZ؏Kt)Qn|9A]Ҥ⻣;Trލ NDqvP6.[cњ5yecU)"x޺ ي>:=9s`DvlGBN™I Fk(K81"ulr%1r^)  />}ld?2$I1q<$)0ixAPeM ?o]/dGe6HZ8kQ|;'7RU>DZO 0 85SiPd|M)$SJ g ]RTB!sN@*ꍉqEb6%a)j]t)2TȮ enlF<Ճ>l~=s'@y'?c0R0ewp CZZ k!}T{[Tvh1ldhښ"=)ZPR%(T[X5v ^d2O}>ƫ{| M+%W"e+@d"tJ 8(*VcbӽV|SW9yhIh$pˋ\Bl}귱[߽݇ }76Ǔgܜqvkdg';3=c]3$if___^Քr`mwXHPu'577jn^>;<1m~64ra ڱ'ueZbk\VF*ɀ6BsNq["#mRޜ+ЎA`` JgUFSǩ41ДiP) nmR$vHcs[ p&Q MfpR9* okU^7'糳yxS Ƅ#$0)FX`Ir0ZSH`QiE5r ͡A$HgSz;EB/A:RD‚Z9 j<4"8Y6Mk,.<*\lR(\}gmX۞xZ _?asrtwKIN9wfYT+nϵ\S)K.ЉETPP+d3ՌRrG&:$UO:;%P*蔸U1HA}iX9krX.,63Յ..|P]jQ~YF=ǹÛ/ڞL>9RM!X3 !6%(C^U.W$$:q\YxY @ s2f4,Ú5A%vDDc"zB{kb͹]c8\͎m.1Pʡ|JJʤɰL"N{N~ yIP6 i2"CȢ= uM-I1"օ$H#Jia}X9Ö/bܙǶֈfЈF4#y`RXx@0Ed\1g H[tN 6Ȩ-b8r(8rⳠ8MYϸQF E=2iQ|>sWX#kt"C ڥbd !^A/zqӌ7O6&EZ p#$1i"^p%tCD1GC]чfǶև[|KBE֗՜7xڮ~f " 2*Iӏay`ϒ,pԑ;B!P ʄ:k E G C|Y˨.EyQXJ<|(zn=JFuJ2SY@3cc I2&q'ɹxp`9rS*K#k5t}EC!2T1kc#ÓP'N& AL**j\Q3whp| /cP@2 8sA)(..2a^*bXH(wTJ0[x6Yj|j\L(8==8@GWj]>^e"A[d%suj΅-^qᓚ(+p ]y:ϰy@vmՖ~cOp*ut.DOHӯDtpk]LI2M5KSIolV|Wlv#KA(kY9w LiRH&Guep ,Cݩ9 {fH=^IT;]Xd+vZeQUǏg 1/ UG:Tq_뛦Ws;LML'y„Z]ת¤jNbV?^3R{]+ks޿rKkc2tr2.KU/"2>_E]dkr<tϞ)~+xnOexۚn.\|Z2n~=!/WRUN,d>C6hWwZޫ7f9K(~PNM:a& G)Hm)K&Y&-Ovc\J7?'3ɸ>;?FSVo~l4z ׵wmGjAy5NW%I5톙5|4* ; PF)? 
V`&}ўS/-RaQFpq@Uh7|p0α7V 8F]KƇrK1=Jcg@5Z ~pK'd:Eymg$%Ky9ent`RUvÑ5^?g_jFHZ5Ggv^]u7Qy*oǎǪ 92Y.#{Vg6K >vOn3ɑkBPLf!/lYuKJ4sV6ʬ zB7DǺHI{X{X+谕A }*ڽC1EwՒsJ]4&ET"^QE4V'$Â; uQMLh*#PJ.8j9m*I44%l`.2@X%7ࢅ_xaook}9c Gb΃NʩtK@4*Y &@YwkhZ\n\8"DeP()M.%M4( 縥D'aYb|x[#A!k_p}k1C[t4ȐCѕ!l>:Ǝ|\l?`uB;LƱ7I1v7y7e Y"!$T=I{Q.re}L>$g#t\ ]3ΚnaSeH>ת9M|hR3KK.pnUK]j[Ъxz?;/3T +nuIއ?N> :c٬v`z`l~6~_9ɕIf[#kcCs*r{,tb-F.:e59bPu)(]k׺gj{_-z,.0vrI]"^k$ҌgjYdeO&!%XNd9o[}dŏ 4Y$(ɰqV&}dk\#?$+RY1`)n;M?MHW\ՍI   O?O]q }"6So9V|u)A)#)hc:^.srNx/Y]w#{a"7/@ݢ/C B{(pt_]|nyyd ˊ.[bcyϑg sˬ 08`iIKPX"{,pPtSS2 f0 _a&Q3Q#'︋j)'sQ(^&feKDR]DDeSfȱl * 1Z+ENM+PJ^qk'M&`[H]-O3BlTOߗzj{)_"txɧ|jgd!EKĒ)8/ciJWnFw0۔a+ >feXǭ^js5|wwfV?۫r\<*vrtgwK2F?^h[H)٨y?}&F|4ZCTe_ÿxP^gMq>Io黿ygϠaz׽59kU{qк2>3|y-Uu}1BPFK8`@%dx J-9 )iaQ6PÑLdoz${)lPbf~ayia@qsJ3`D!!rBdY:> 1j! ~hΥm-slt'8^q~&X|=JJIl$ZaɏohGn_8?{{@˃=-~Bg.Br DW̎1?d e5>d%XL{~MڥكȞn O :>b|{tJRt$5ӫKw iF]V8>ӷRj6 lX 4/OS(e+-[h&@][ n[ǪT!+w編|h9 W]rM}wWo}.=^+?;~:ǜOhK~i9B)4fQwU|>;L=/fF"ts=>`D]kwj6v=R.?u`;Y&2u7Q(@Vqs 4cmv^lN]^b ~G_9~r*\) ѕhqI$`ΉA)GÝ+/r%`W Ku^;ofiTJWӍpztIGV謈 OVDD4{bVլ)1ϟ*$fpL7I3ݨqƷ; H9$fJi4Of6%<&&Z1*ȴbA$PhsBG&D w)aP\>ǕJ\RSf>$`als`X6b՗y:}c-ZVtgG{Kn1͏Y k[zz9ov귾ҽBmEVk.}BJ91Ab.orn+v61}zwz^5qڹyn}NwdhpO'|ӫl[teG\mwwQ/kNzUMg/BO;P:s3qsDŽ B,:0-  ޑtz ]Jl!IR'IXANKќڕ?htnzɭ{7O6;L'=!EoE#Eta޴> W>Vc7 ,mYF[tR:@T5nGx4:€^ڪH7w\XU5(k kti=R3q4,r^4l1tըDp6A$B| w0xCz#ƣ#*D.)ڥ궱\[_Y[sJzTRhOJt>!Z?Rd(X;Y`鏰ƲNEjشBVaVH; 8X@*C5-l_LGXlT,.6Z8Ӫr<@MH`V|(d FAPpA<;ɃdI42%^Zd`b4.#EAGyfqH.6ܣZвzb"RLβ:-OC~&Hwq3##Gy\~ܺ.jPV :TJB4oOFohS k D*O+HO$顲%\%A&8-Yu}i mHzL@>G'uQ9 .!+4J&Qft.yP;sKI F &Ŭ̙ ̀b$y ǤM+dᜊcXgDMm`iڌ;Ũ Y/a/U>H\$h'uc:$ 恀etyMͲ Y2i@{ZLOA[.(6F"Cږ8-c=RVӌB[=wMcA= }nq_4hx<b+Zq&HzB(l@FZA7gm\p2+z yL (fS 4m&nR1C9c,ZQK;L'F1OjWӎZmlaݥFEh;5FH ,Cqgs&R{X4~SDu&ji2Yh!2XtɒDDhp$EFT'a5qÖ/x*~<" q; Eki7}$# <fLW,:hEjgXE8i%rV\LA% 2AĤI-WQ*[j췈g>u 8[c:jZ*U\.`KlzgeIGm3i8!;'DoJ̼s>&>cSմPVa&bq rQscuuǍweIj"lR#R+okT4A&2#,j=s2O5#4ɈbHi'GpR,U"hťF ZѴWZ;e BIDq*e!` J̅+E$2q@f"#2su5qt >6W_^I[Z.CPTu{zKg2)RZ Nh&A{t>g2, y>t݃寷` 7ս8:ul Q/voOW*֔"`z)|c8~J:5x޴>py@=H/eFˀencQl(D$Uud yab}c7l@\47,'T@iUY* H+˖#:iBZI{i&lFLi] Wm &bo'قo~LT$>͈UwL_xjQ:wyxJ=絖 ?(unzmxp ا_6$bزXتSg{FW!8,~?|{; 3=aY4w)Kh2e+ 'fwWU$^_t4/:ets AuAr|7@'(ZϙED͑Ksc˅%;T?.lDH7 &!XP(8%U;:!Ml7;Pʍ"6%ԑaYqGkTr")$C ~:_߈_T Wz<7L/\v%lW. 
ALB$CRAMg/_.ZJu9 ~in-Ź$jq$ 1pf3g:%j= HSty;FcRa#Dt",$GC*Z3‭iU"bK(F[{9A 9J.rz9,o,޾vKsCKU.[Z^DMQ@n 0c6ќ1srXBgP3ȃ_ O%3hi8^r̚p*1&'-FtwREb3F v);/# JN/Tj4v^NxǶЯqŽ@XNYxi 씎YIO<6&oLMq|XMV-̒[ǡ5y/W !- YsX߲|>}UYש_ex4fFׯDz(+!3>A3|&[J]roा`ǣA,,xY s8^8L9_&Sge/>6U\eݘKr޶mmqA埏ISsR=A2lrIL8|Rߘ{@"c$Of2 PTd#*HV\^YK $ja# ]HK}48 cdό?ԩ0dt^GD Vn}0 IV^lm aF5ګϐND/a-N ExA vp`e/ t^ݫvAcaqQh.rwz39جNK5oxv(\'sq~ u/ `ei6ߪJϞfI.>"*ӫbkt =0(Nq #)Sg׷ ïY?OYq~5-&{v;3VNS;MIZ;.En9L3`GOoSkXIøA$r]+URg4iaK>y$ª%LqQNDe5Y/Х]Qfw鯈Q r@NMGǺCi(ޡ k!}`"6DOno踾{ ӭya(cKA*`"`eppڨ0gB*F:RbBrxè1jvWR %hkN9z8{c_O2z"q^a(4QY6P ajL0BFV2PdX tM:8Gͱ#2\*밊a,LTEd"\B6xјI&r4@Ed0Wss,b!BqaBȴƄ;̈́ x8ZfR)ͱ yF~\{\w &F6_h#lϕش_Z6D}Yi![T9wo;~{)QVlm]y}M}о%V3@`z+mDYLcКCTGf( "ΐBC"4t(}; .w?T~\*ry3L[ %NN0:IUQS8U i`p& zIH``^Nh8s# _zN B+O5$DPA B7 zI9lsHޝoegtkY Fg*j([?ڨOޟR /4 V['+XVbKXk2iG&s߉FIoMFQ`d r\+-XQBYq4œT'y/yI0*%Q[% )";cAsʱ%*9b'o葾lڽ&O量?=r>9/ͤb6ÙfCĭѱO(}< rs$iz3iS>XɰSoSTȅnۡ, j#}C U-RV^{蝚tP}XhCCDCLYShZzVcf?{v}dɰe>,.Ns0ݏ;)i zg8f J~2lփrG<>ʝWMG|맏߿to]5tޱ<[N!'8h_|YJGMP;,2@D2l*/mp߯ҳ,-ɺ4M6=Dԣ4K}0u ͉lIikC \[E@߸G1pDQ  =alt P>FJRjΙ&8K34i 2f 8Tqi\F'/a}ƽv>'ǽXFJI}6壀eeZl[ܠzFտ=exJj&cuܐޠ!@`uU}p IL< I9(<m4իf*նUʦ*ܨ&lV jho9zTSt@|4'ߺWx2NغAZF!eє{54<- --< X S}u$O븺X.QAEVTF 5.ѥ]7&WxOnth4c{d(aj+3Rj؟;1P\E;~ӚƓJ׳;&JTfj蠍/mk^MުtI*saGN?цElkz^9S@۬V=S#s"2$ IlhƁO<ɽ`y.]gyiŀ^왻[.k;6o$8x$(89|rPzbjiH:zeLxaz5~kTkn7Lh+';#/Ǡ": ^V}šW= Ba:ꅜ+\JrϦ T6J!۠Ln d۰.n6m }fW+",VRk+`4ͩR*JG(R9WUtpJF逰2l'g)pXe[FwAd|(xiϬӍ?a ߫>7f0[#ve曇:JaSБ!t1 A* Ƙct2@MN6lI 9Zr;'MHz HjΏxOPMzёf~* Z:YN}5%j`STI'kNtSik=K2OYoY)3άrsn$MW8}=ՠld 9%hq<I+E*evL |ĂVwUx]IࡪaYq1SpTgiKgdF~N#q:(x oj2R4@4:&䖦03,* D+r Lb ê^8KB< ga(RKO֍؟XF,0=Qt*"M`ԈZau@KY0Tia)}04wa-L!X hZ8%,* +9M)}hqO!*Dh' $LڂXlhL3fa0"6;,s$|NƇ;lRo;?^[\z q9٫W$9m.YǒE,Luu&$I*V6*ֱ^%LvNM\}=E~90XM,[GE$[Q&=c=@H)D0ΤS3E.WLA^@ս(85iatH\I-:NpZBd;GlKKEƦ$U/AdN/Kg4r̘R!SJۨ4Y׿GMWV& Q0y-SU:OfFZ>yu]ioTI+)DR/Шu*5͌4+VHxaG{a\*Lgqs"D<g_c0D=fz4{[7}5QWt~x>ם{TOcC-3ؒҗt<Ԍnf 1Qɐϫ>ݴ9;]p*խrաgmۭ[=m3e#a $>?E6#'~ټ{f𷿼9|7?竿<śC.?>;|GT?Jj;qLpfo= MњF{S;4OՒ^>~fumH_&?qNS;ꔇn;t]5g tt=ϓIgǖۊOZ~Eg՟.Z4|8w)C vȗJ@-#\>pčDȺMwJ \YM<*_S1zS҆^Z_[37wq᎟'ՎKB%fcpؤ HڶA9Ay+DS&҂vFYعSH7:G 1ϑZߍkty玟̴^Ut>Kpa>{}^kV'nP9n>FK 5EpS0IVhmݾ}Z)}FTt_RӓaN> [~~*CD%]TR%w1P%$m Ja]f&1@J7 Ԧm"5e] XէG}9_p_v4{h~Q*%12W/Љ 楓^hƒSpy ?oIM@gʮyadh(ZӁO WuӨǠkM1w8a^k6tO۵ѶBaZ⤻x]9j^X>Wqo+O"'{lWj?O~jϯgSk*\nrvJ)\BK+[tcz9wϏGsx meA5)Sß3sny{4݇Ņ[{j׵[em^>\x+59_6}9bd%KAJ]-8YDnWFXx'i[R-O?~^^t 0z} '>emns2W#j1ϫ^Wʲ胖"['&jj ^0ųNoma.GE QqW@}]C̤a }چHYAdB˲ |lɡZH<~^jj̘RB)PᘋR2FIƜ0+Ucdo+5Pc2x Zu{Ց|H5Kjrت̪,.0ktU(Up- wKיF0cÛ1n cDJ*|M1fl x43+ķz[#lGڡ/ ҆V6Ǥ$2 xC <5&X\L2ZW5xf JmƷ+Äkz!hϼa>'G!/9B"iippd#<"Kq1*Li6) RMʆb=o[T%nYUC Z9+(%)fUzǪ1HrY :b_ ݘ0y+z7xS vy9bxDj3x+)f 1h a!1\KE"Q ݄aK$-0&lJ80)8ЙVNecX"6V 2I_~N .;f %a7h,X \Q{V1(l rABj2ZS^u&>uK&} 5ID^,.zZ(@H*+A8PRil,@x@HtvPXoX+1sՕb1N x3Vk`uKk׽,3f̪(N1ƀ/N!'xğסxaZn}#.5Y=P۬dpҫhR0 Rn dPgP j6V[/5~a=(9@i@2QӲB Q`rQR0ac|Oyq `}Q@VǃV( 2`G[UƂQ,TG''"4+JXm))w2<}XׇĵRZU9TX] XצXkp`(c#P. g27* 7Db6y.e};#D ^H&^a,C>j6yЎ2vp d`: (N6G!cUmQ1(#kFV8-5* ΃p:PDڤ pd.vNqfrPvb,@?x2ZF"v7H-28\ɂ_a,p0",0wϢbvHY7A.W =, f,yՠ6H*gUnKo"Zz7*-Ia. f&J X[ҹdJac-AnFٴ|in7Z94ftSd -骅 #I`d*mtnLv7,E2QSn I<5QjhƸ? 푾0 |ျ!(A/Q7C|̨C 9@XH#\5[21OH2ˇ{H:? EkmMg TfDsp$k 6PX?ǰpHJfP :̀zQrem]u0"#A |T99ksڌ!$rU.u/c+) 4 #!2 .P,9vQ@-0 U&y]p]ꊄީEF8Ø"Հ]nj_b7.g['=Dx ;R)r=:_Tf֞ on>V4+fI˃t{aw7pvo_'ޕqd0p/Ǭ' 0,!"i6e!M$+4|buU^7*Xh-yaJX0Co{7S܃!9FK92uV}Qݬ{\x;Gͺu_b~RϸA `'Wd]U!XsY|WOwb.NҖ=$B6h,FG-ToN[zS0?Gsݳ·vkɴ-9kx[btR鏠kp"bפkewbg\c8!yiFwi76jmLx&LjF8\+np0%sH\tIQeP7Yw;-c-? 
qEAK޽Z9Ph_u2WLCzw¥J/Vwm rc5!gЏ^j]<{m1 nVtOijouσ,0}T+6jP>mWdCrN[k_^`-4|Sx u%~R@_Pb@2qW=!eU e:/vDʙP \P7P++6˫:W@GƹPUeEpEGK|cg-r %ȗZrV:AKXe;։΃(ԅt2Tέ@^/0Z)M}XڹO?R6>jdʹA/o&n`}^'V߸ "bRKTS悬^x<+9q!@ů3I(DOSl!Jb[ǚ5x$弘i:ffji[4FhH˼o_rqwٗANC(LɢTDjdu+d=i.r8I()똦b.t㌷9tgԞK p)? E&`(*Qz}c5,]N\FEˈx-vcmi 4aZH"]{j&Y %6ɺH$ȖIoʹfp]7n-Gbе]!1rW@ƚQN:3*lh"!BYQo ³8wO&x0/~^{vm@L@:x-mZjp"!qe m+dk& ۰6m Q=@վw*kƎTA{ )D>,o )ks!Z)KA{){S(FSH~7I^?jH#򩞠K%c|~~3qZ51ZzԺZއ7 ^{)G6,5^Un7>BFYhxR|ӵͫ]*⁂U|"Njx0U{a˚qڦqKP2-b{>ݚ.pSԻ2ء%XUj^\Lp5urUg% !SjtF"вVi?}-/??$]tUWFС~ ~wy o ԝ9^oćF~~uUizU8b{o3r1u?X61њ'nnZ<[Iᦃr[x'{ 8 SE,#"%.r'St Ev¸\,˃?]Y`VfI|V[laa~[Uej(v|~T5 p~l,rWF#/iSo'LW_~6Sx_{^#ƹ4:i5fxZw]MmU׭ {?E qUM&[3J!8JK*,B:VRD=Qu0 .:"ј s"4P*m*{ҜZ0S52Eͩ&4fQN&f6ݚ1Vɦ qƾPfօ  #Js]%d|yH:FV_*7V݋4(S]Bkl&g؛Tw+iL2 ~DZʳpΪ2$gx6.Ue^e) ӁiS['Ufm85\v6WkZ[nx ĚzP,s)R`{ ~uF<˪AqVqCdY_A]@K#(Xy*Tdևن[NsN>$\4b6W#Q5Q#n@`éFxgT*(T1xNAr/TgՈ9fԈن[#^ Xp=l\^L!$^L^9oqh3}k4?h4`awWay3/Y_яɕ˳ǟGJ,wN.ތB)xB0]W>C 1t$R,8PeP*|!K\!]CB;`Mwm{ILuHYh)!:Z35y`sTˈ g |y.Q D8>] m6~kq}zăJ  (:C,7 M>zKrQ0agBr-{%h g2Dp|ЇLW#bi1gTnaHf!6B@]ëlQ $+2Cn7.vays'𠜟GU_>+vXo,o9ORV.]7tol5DR!o5̯H)>}ܛG<8gkx@E )xӦqX:wLfHrD,?,t4ᑗe\scPK /\)S@1hƬ9)ʃAQb5y9 * ›Ex[BvTԻ".jsوbkWg =7O01ϟĸsDMt6)J`CIc%*jWJ)dx2\OT1H.d$uJ!S,S J0,x!ܹ{28cj,8alQp>i- .2*NcFʪjm 4wFm6*GT KIwPMن[7⬗p(7b峯Vggۂ=7Mg_q,ȳ䚩Ý퓪fG!Gh^_Չ6,׃yq5bo_:üyĭ,N?ys@=yå O=cF@1gE)U j$vA1bHq^m:S^\\dJu%~ii-ť$jq$M#<#|8,`, M AewĖf_UW(;&!{w#(*L&{샹腁.g͟k]X@IE9;=3y5YߝVf4s5"X|#b>Wc,Y,V~ד槺*xU؟&F۽hlӲ;i2 &|P܅L .RŻWdh__y_Lg`VTLί$q/nz&?ߩbq* c(:2i]~ 7>  Ojα``kB@>mliէDu?᣹=lcTK>UTAӬG0)}10&6dxtn}:J}Xs  =~^* *LE/Q$e ,"UYE$Qa,Su+¥&Lyì1jvWR%hkNYs֌p ||c* +GIblVՓ5Ϸy(0:9Zs (Ōh,8aMJT &E]SPǿ7{FWwìOBX'z˱.-=re+3<հ?M ]_Lޝ ɹx?,.8iў{=7Nfj/k^[uJd&&uʪdR "6n+ ~jDcS\s&B7V^GZʩ9HJ$6,{'Gnj^hfNJp<.{^:qO,\Q}{uw(_77M"N'/Ё=GOJQXSL 32IWpP9`WqƳrW;jV}Swgqō s9lා،ѡC\x S/J?)4<MZiIinQreK\4v@ၱXjT ʔV0@GJ3o*ކmlE3ܳ/+Lg+ MbDT) c#S)+) 8%T c_t@@3lߜ < ^sU>cu֜5.,)o$C+fn+_]$7n^Mkc|fkCwĮ|##{CGoΆ0 sTh5 Ƙct|9$m2@JF/tJO03H$GhQژh&Ka/G}=5aBAGthl Tr͉g3Әj+nCuU **>a1ԿWA|h98}RWߖv;my77ƃ݃ ;殫@-8fZFo{  R ReH,L/^.r R+R[2Ee:0;˜8Q+iӅU+DY]]K{0?1еfHH+?_NMJ3q %7"JQXȖXR!tX@,>S_[Bt';kҽW}#՛`>}gsIn uz)K$)z,5'tԩtZGVgYɷ45˖_jw{-9#T  ar#J`%y-(eUjyoUh(F<\椃_'ZV3MrzPs  `x#q:R#$p {^ޛm{9 TxH Rr { R]Y+rs ,j &A4n8K;gaZ@3^k?XzLiUD((D`,'ˇìS %uyi&alL1's Jh@-8BS*rSx@ʼ:^H\i$;IG딉Әi,F~ge6z$QԴH8_R#艾Ͽo< w=! {K0y',Lt:1hX~?Szip~x|DTUh'% ȲUͣ}(ݒg[{zX12`)"K;RL]L&{)(^~ dE-6`M@ZX.*nNIpZaa>:G_b>WẸ4"G Fi"݄3TH┊z l_5Gt޵q簅{z&?T]^O5rS=zjz^"8St#KR}3 oWlw' h5]UCګѸEay чQ03EGu3Г:}ӺVd[m}M6:IЬT1y4v X$Ob4sycP(Bğ/Aӗ_2ݳӿz鳗N1Qݯ` 4%vn]ܡwoW;kQ]wzW,}#fAwZ@~(~l_'.q\7թG95*'ЕQWVr:puU~^nћ]J(βiܶ1o^k`IXpV$"G3I[ "F㓫H F/9)d;sCjNX0aN$G5 T$E(Eh 0-I#(f'\sOL&OYIRt ʭ îtKάc}:(vJ8 Bӧ*brpu[Ƣc\g8۠c/tLs14 hv0:*RD^R_OcJVuSPm  Uyo7_U0fx\?sЋHh!oWd{W,, xw hW gT){;|'k-vΤ^t253Kn3+Ql+|A$yw-v,X2ti0ؔCs.ߠY^2Kfy,/%d|$d,%dY^2Kfy,/%dY^2Kfy,/%dY^2K"$%^ tGY^2Kfy,/%dY^2Kfyy& Qݹ rRm#ePH۱yPt۠{w/vSؽACײ(v=X%[DZIJf<|?jLZu7 JY!h6 m*zP,ƒl;rz׬T B2rh% :WkJ{M෽y"}#{%hYc3uq؁4@[Cm5}V+ZѻQ+6zG"ʷmKϢh!)dpSE#|3Z-Rb&wN-9iK:fcT標w5o?NQS=pkŝԝCi0!i&;6*c^i)59Q偕omm8AF[%.Xm<{َi#I\.ړ< )D+Q#HJS+[LjYs6=BzՁpgƵKsmp,qz\qD=/gVL=g F%%K)2cagc[<ᖛ<-6 _]VsL#7WtsUn z'~NGzi3ybk#I!k-eO~Ѥ!WKd֙:X ,*zG>BDaG(xQV)Q3ŀʩdG*2 D@pE TIlDJpvr&RXNT6Սj5Eg\HeӮ@?jHyP<r ؘ,r cAuf[c3V%dRXJ`7߿;q&K̮ۧhC^,pLiaDIZ{/w}}Iz.8X!z-A:uH<; K^u%jᴡa\CH2"hYA+>2Td1c rx'{Fm@E#Pɻ*K ş4S\lN ޥKNF u$}n";9;=@die'uϲB\5ߗ=e;&{-B@ZJjO_4iˌߑrHSL;$oWO xbCr6w%wH433 bȅnKG]jWTO' ALrZFMZDv՜ Sݙ#*@adWh#LIdN*Qԃq* wJm}[JV+g?横ϰ UG4:TWeTNpt6o/l4T0o_V*fxVA<o_|qΫ^ŋŻwc5> ҀWTg3bǑKŚч1鹧>+x>?ZvtB|TQ,,<8kSMUc!Qո,'?LU-9e'5(! 
R5'xזr}O{zÖ{MFZ?X^?1QhXA~@]NR,N`t1´}}>U-!(\8{D@E Mw=kJ{XB;p3w2UOr==zvl+\D>@P/cVw4z Z\tI8oe}i)1~G=TCh9 ^l]練_YyWY\._ZpË[v('$DYOÂGZP+15gw]_ ǯ?s,DqVF %(4` CйBQ!`&Zp İ@)f:D,WƀGYޚ(Q|P jK]w֜ n?[ 'cU,2tɟ/ .DCÌlܥLΈӀ0Q^Ic=ʐJ_u] ^GRTLd\<$%v6 'Ȍ3L{a3;4}k'aÐBY)KG˜!&7fwv`tO,wMh=~f[ZOctN`p[5rҸ:I(d{xFQg-!)`(a D-ˁLJ)p'V|F.rE&4T<<z+{ E'7&0bªi$ 4 "1'ڎG_wN>s~n]ߟ{bSR WZr\}%AՕ3qPxRfo63 {B3'K]~ {+/k7 Trڳo|ꭒdT: 64kjC 6Lj=MZ~׿bӟC?C{@Mん&˿{)[w VՇ-:he@OM_U?C<˽bƗ?|_|wХ{͉[s <\^~b֫Oσ;ӗ?ےƹrPmR1 ҁM!)1̠@D?TS⿘ ztdlvhIn*P/EרORf]O/H' Huŀg'.P£p8Bfj9`t)"hr$fڦEb1!>=m q^A= 80 pػW/]yqܛV%ns? ś 6*uިnR*'QqQ.W6fjixؤTڤ=?{׶Frd'y g;+`cA^[(Mv$/dfY$Y}3bVddd9` P2kײaM6it&,Y¾wϢGny.nז%[]-8*Qw6Nxcs9{;?m-}o X0.ÇYPnlZ I5K5lm+AOͬ`Qn*/t6_d`91]W)(%_45O<.JceRRCP}bwXrn#QGA7S>:.y:ogw>H]iuO6E֍tZGzK3JRO=/ZQma+ 1DF㜱ylgμ,BJz)BL3c#"#B1 +ۮh- |M ]ӠbkՔvE׏CTuxS E=k;e|1ù,ѹ,+-KY2Vk(Ȕ`u"se ,|߱Q|ZQ1,w9@\}e.1S$K$M0'#9X7UsoWq:rξe%\F'Up#<^f6+d쁦(||ӳ.SS[BmJ\ J:1TQfO7˧\9nWv}CvZxr=g==26?[ϡ '?Ϧ'^צc-.u Ϧ(_>=SCe_kfW(bY0^n: 9Zi~n\j|бFj!23??3?<,*ʆŒ")YkHHyt B6eR).T>JsbE`tuT,6Ha0ڰ`=t W@=Ğg_^>Y > QvydP4SxM[%5]krbJ2h.Ӎ4uw/) "sF;mUh54swA,P߯zyśrY{1H@J[ %l)R,V'7) lEy=Xz̧4ĬkMo7Yїy:s5,~ADH-f4k(vx}i:2~SeuNi=̒u?'{{֠mm_m9n=jG]B`{FZ/֪uE' 5Mj7(OJX+Xha:@ NXe_' Ja`̥]+:jÕ+kT 2t.jHR95Hk y\K{;%yggJJ*gw(ɞl((V.&U4[1Մl}4)*ϵNBAP~n Y.ɋ Ȭj%x @V2cԮ$;, z se۱X;]-=cό[Z2S6hU-A!Q$:eБ5JQz~5$dhhȢ!RHMV>9A^Aj_6NrB0OW1ayշZ}f>],^7/?Ljnm5eۃHGa9G^nǓ`%4׵~ZvAtA\%/Ucj|ͦr]%#}zz[ {'uƘg ~1UG,f<Q\s>˓:&7(Qӎ~铝*t1 6 so|7Ǹ;t!i-!glcyL2їJNNy8OaA>"u o ; =I#k=#teU2Yy9"sMPa:+&QVGb qάfV2#XGjS/ˢ[Qp}|@q}S@kUD ;I`nՓ ӇZ"fOW0II]MӤ[ϴםWL8e XnDnYa6}nK6jFTg}[xfG_W.ql–d0!45cUwi/CDG"E+1xϴIFNEs39r AN=] 17軆PHS3LV}u0`af1]> ֆ1ИUJT5%b<>k_0K5kwiWwKsFc?|}512ET=-Qsg.A%fE:ˊpruwvHWNg9ls%yzqu~k!0ǀ|R Ԑ2"BpԂ%I`!.R6HNY(g!X^a1jEAb1JK,oyItf;Qo+H\ 3{ L0VĴO2@(`PrƋ@ 3/&eDxAv0` ^| Dc"}ZE<’ - ).IE D3A=byj9.iUfP3@)䤀`J*2 'Hа[+4qrR?ԃ QA:Ez|d-jmZlSLCg W!҇ PdYIo`שX/bj$P4bV/TXPК@r PA"DLĴD"b>I6S# hX?P=zS7\{Y-RZ[x# nAd6&}‚ՙA,TG,׊^yX&a`l2He9v,_Xv}ӝ7,d+cCX'XXk=p`c#.>pi/9 TD%ྌ `GdQv&!4JpqizU=i(d&>a6ԠQy %- H+0:LAYͱK/, $f'"Ҡ ` &GlAo4i%8\NCAac9agXa!^/]К %Ee't#,A5 ui<3`$k3o^ \)DX ,+,#ĿAn$LL:T`_^.kX]R)'UCV^~a Aj}ty-7A70d..Ƴi^\]WuGn`@M%L{&cvfR`!ؾEA۞Šf1Pz(у{UF7jQM<4?;6$8`8``\ öwS4/,2.'H, 9]]]]S 曶f옍Ԕ3`QB%ɪ .Ya7x(_ ܰBST\ByvTu`B`! 0a`jO9+p=a%D}z% *Dp>9= ++ 6EELY3''.\Rm$l@#L+gba#/O(+aQÂ\$$Q%Ad`a]O+uશ),X \G@cqmg! GKb 9o̵&3)[t9t .D6 1R.:8*E}\jl >3,)o脢@dz6;N^`ZEg_I?^tsVv\>v >xQ>͋uTG^ 7U5MB;[u߫u/Աu}n].:{ npէң:c{ΌaH,Oz3?Qw~1t;Yt t.~9~CJ+3^rB3z߽f|!>3d|?(֢y(2C$( ,[eV>iu2e9r|yYLZ=jb'Z*%QI~I~!.fjIᏯh9Wp*3!cf781˅=)*D24DˉDXNt >.vJf|{J^57AkgtLq& c}.a]~םmY:0T zz.^A3%ç5djJ {gH]qgPM&6d?e<}(%>c׽3o>%-Иwnju&{AjZapx,S &:lf{>+saϪI;FuFIr6a3" 1aLLA8m ݴDzZ{Q!YJ-tc4r,SpUi:iՌD/lt)"]{ Ihziv:CPҒ,HaS(PY Z m5g[Lp TixCePu 49)$X*!'P>GI4قHMֿ~wtmٗL7:p~~k~PBRw3C;~+ldF_qg>J9 XΟ]./q~YFߦ^|~0R,gx?#+6ljxי֤~thjFJ 0J('\\. 
7[߻^f7$'xyρBw:ii j{n*a*Z5"C !3 \LqpؐsjC2%o?W[//ǷwjC4FaOAoꚒD0{{ l!ixtj]AfL?^1?W/ofx}.)Zj^f:.vTd\/BZ>#ؒ[mj57cic3bLAm`(|zl{o뜵2y'Zmk͢QőkdZ]>~N)dqI/hr+%rS:fr'tn@^yo߾?+뿾z+u#0N6&z|GY5 ͛FѴmz6ەv7,T:Z/ u~|zՏvGa=Y͖R+fkN+Rcy + 6,飘fn]*F;_ 1C4~z=ưk17qP$.Nvi('wT:KbzКN<8k {4צy`7tg2Zyh#ӫ$:"ԃCE=T[cF҃ԓVBA+8(\o:*]iMZa' .;oR3ϕM# 1էo ׶~Z4ӹ?o8-y\)oe5I4I֧/ f^=ފ߸<^!TU|}չcGWy\=E5}*Uߝ߿qh]HLYSE* !mBTbkDꄙZPq"N%tڣd9'ܚs9uE攷ik9 Y͢KL ɰؙK[Mt) )E/(18-"QIVg &u;˝^iTˉV}*I<˅}w|cT(//$ET^s#Jp3Q%VQ^^,I+.$T 6$ KZc$<,͸^kbB 5qv/t=,74 +SRjMORD᏾}Lܐ umòA'urvmǝJ9U{6\?rZ.;ʒ0LIfu tsC 6jXɃIX[[qOʉ2&fNL0Rl}qY8;JnжelMݖ=_5[ؚfk u˶P-|R[m<)fcݜ~ƽQO?P^o[@)C+\:3qeLhY*MqU {.2½gYвf6 163a*IX/hݚ8-Jy*V58jAVDa[CtZ)ƕ-nY$ʥ: B2g4n&V!,&7 3[HxK T*cҖak춇uP_a7bOI+f.Yp'\$p(gLV셔Vv`fqt#{{-u~/WqPCmmd|Hl >$wW|-SIݍ5OEr uSDQck]Lc0sNOOg^ =>#7Rh =hap @bQmb[" 88 ݍe,AcaL˧L}HJJF7(-,xGI`iy&I( )mSP㛷oVz*M͛q}-\~vanDXY^A]RN˞lv~upcΡe-^B+ y#VGd%{ZeRY~&Kz̞Nq`z+e'؋e R2nPT?tgYqZs׿k2i*"E?+~fܟN9⛋/)uN;A߲#>J@pL.F0F{)#ZoP_eXrKaBB`NxE sPLWΫ{߄k]XX@.rPwz3荙Y}/{s#X!  \,,Ng~GMw%5-]|4٭ëaknu a(Nq _kp~_[VNVSV^LQ{6ӡ˭(0j>v\g9H8W 8 +|7@=IJ}ZҬ?S%߿ć3 Snۤ**zBwMo>ttCwJ,dY>sKw2CGQ[&bps <SppBI,% XD*"V' 8𠍊@9VQvkA٭ml jf '=vWi9 MbTc 4:L-” FJ¢/P0;Td۪MU RYU4c`*"p "JFc&=Ʈʸf=/)=ʁ7Ɵ? !JD!GHdZcfBH< o-3 usͱ1xF~\{\7gƍ&67l#uO(bocowgmCD)Psм$gH 1/IH;g+ǘAw>nr}I}о.%V3@`z+ff1Akb R u@j&7D8!A)9>n(z~{v!hR p\_t>IܟC/j븳K$ VEAZ:L`LeT1'тÙT&0ZE#\ D~`;(H:y[)ԁr\h婦ЀH"л ޭXo; ^3 n. @tTORuk [TG$yK gQ ?^h6DNW.%KXk2iG ǓSߩFiצy`Wa E p{9քx|FPbb0'%Tyy.hcFmP*CRZ*.< cKTr#=5aO:Wb[?e~j?KS}h1ᔮr|bntl~j1sJOVи~n.|o4T۔V:컷Н-47oWd^WXjQJ݋Eo&pg惿IbUQ E Ų SgyLn_뗏>o87˯#^{']>[kwEe$C}:pM@L]{0+.i>Nd8$˗߾~>|Ԧ{몡 0;S==ZO _^GQGMP;,2^z%  L翐!p":VdV svW{IlPwbfmzvP0l ݮ"A䕫qc G !`qhF c l1ٮ [NajfcL%NqiO]Fa?^v.^U,#K8/M(`rY^P=t?2Z|m K.M*y].7whn]U4\ijEPy ~sM*[kFjcUʦ*ܩʦP@6¦Ϣ^_/#`P?4`t #_-8^(LMI}~1MG))ޝ>\<A.&vZ;[\.xoZQ@UG)mA}Rۇt޳ʠmV5#=t27|IfCC5,i׼@mX4:}-6?P=[^xL!dHbx3{+\vi1]Q>wŖZ; zsw<"Q(t A!AϑRt;dS L;F/TԎ+cƓ_#udFru8|7j-Cי=ynO46D < !.S* -"w -Á(&Q!e!%,zg՘ `Le4zl5Bȶ5qv<<ͯ"Sd|3Yes6ePm),`*XC[,S+}ibk3AjRu ^p4),ktEȍMȒt$ VA bx7ej[*OKӳCcocڷn[5 Y׼PrxHɾnc~8͡-WVrG?v[o'8Ew[LwjMGκ\m6ejq'+Z~3}ܴ⎣h3!#Ksc˅%?3P<6"IX$J,h(Ӓ*pk-tcB)7j<[YcXDcZ)2[ U!Z[等Η4+ P֞oLή;xx úĖkG1bRi~K f6ɯϮZJu9~in-Ź$jq$\w8Ù|8,P Y@x;h0B);EX"95U!clЪ8`1waʽ3:aFhNPƖqfk ˯]NG5[x ۛ>mGM' U'.9jALW2I[ūGr^ȹ%rO&KၱkT V0.:{s/8w7k)LQSl5*.W4EGyP6T*0C8't aPJTrVk/;^sXe[jT[QH"mЮ1{/@x}C:ożv%ڶ8v用GCGgN0g mwWI29L0'Q EmI 9Zr;'MHzHjmaK/9t,/9daka=$\ /5kLpmT;63~oa~]}m(#+CCid(+P>liR掦O?618  'B /2u;ARAUsx3l@-بj ZXFƥTOל=yy&ĵalI˳r6gF\t|DcQGH^.j:ա(k@ ri=Ғ=lF4Š_8go#:0(Eb![] oG+; ^$y#F?e㐶uW=8%dڡLOwuwuWU(pȲW^K=yzA2߫?'(iVLY0@9x^eNeo-0py,/k|Z]OeO<3?y< p^$n2jRtgy:8ΗGBqZ4&EO\€}[I/k;]i[@쯥+wǞL=%de KE8z5 Pҋ-ҫ@ EE60#c!+>O&PԻ5#)U{<%y4Scrg|P T\2(?(< Rh{r N ՚Dbe 75`wZ©P_e=@%aBB7A APA(¥^蹗0yp " B"qL8t l=TTf]{B~W%PtOo~[u6?;ilS? ۿ Ơpڣ cwe C)ߒ~<{V .FfCVJI-bşn4Eєk,R/U\GP(ˏYs-楱1%; 's&E:OuFt!aV-m:F䶉$ I+ IzKGR? &YR6Jw/Þ\6.Ė/XnnbKjQ.ɘ kGXJ Z'$W[d+CNqAA̦CNQ6Q0! 
ƨU1 _YJy>Gg1zMs0'HW '=vAҰr Ĩ,SR'itZ,V#+淬شy#Bl[Oñ#2\*밊a,,TEdJ"KrƬ̍4![0;>@TC~Q E5&JZv>흇5 DMHER",1hMR*#ڄ NagHPJvj"X`T9F$㑅th}0z)#"b1hE -ۃ:#gGL=`tܔ@{a7N=}Ҏ_n~->Ɲf]''Ymwb$U(VK L *4Z0c8ʄtDxxfC (,'wQǑ$'6V l(ҩǖ>:Ȃ'j{o߼߾mUdnv2ԕ7)/FV37 de-yNf$iz)?e;oS^Q{EJBU^-zf77 m4447h(h(P9/5ggjo_<}>G՟\]?&92m|ޤMݽߴIQ_2`l}?YV](̲]XʝV,{9Ί{_OS_W5%?~}oΟto]7tްX W%Z):C@p*3~a[[|de-7AyLp4Vz%  L!p"У纀:ɴ,M˚U B{ImDI3ڼ4߁Ȏ.xZuU$pT}bE%H6X4ZсFB)a:H9g,emWAv6O ԮAǘJ<$({oF~Xaf''{/:J],#K4 1W˖굸or;UZj&] OzCz6TMõ6.m0W|66sA coo^vwV[lR˝lȖjEjj۸ͫhÛ= qXe*dnj9OQ]ˬah1FaZFS% B,j;$A%| X!S6SÚ["T="To>ָ8ƌ"[*͞ڿIt}]K]ku} #@1I~lKㅒԘ⪘K_ےi*G4tAks~j4p{PlTR2ӆ&ZlֳL r;5Z.u;{/BVR-޺tDeI- 5*|`Yŝy[ʊ¢qbS\sH7VGZʩ9HJ$64{@'Gnj^hfNJp9(EGCV`M15Pʴc$]AzeLxa6)ZU;k&nt/'#/'ej-CenX\m ܰ(x 7I0 _(L%tVusx}33*0Cj`%{|)vRLfD*Caj3jXP2b=6MAZ!o7v/^WtS]=Sؖr}B7ȍ'WoFyƓû(N͋i1uͮ ai/if1>=O.0 IsE.grdA U>!:S_ҹB#{ю6śp9eK$Or<ˀj&;{S跀>?*{3 H6($f 'W'˵#JMƠ$H*T 0Zdee L9|<]kvVC^9M&A>Xr2J-)b~ o9CC[檗`ǪWRznFV*D]>1I:_#K`[nG &-gayALACl:3l_#^jUN(\~4 .,5quG lH&zO(W2:^H?pǖ6M!/:} T+g0ޡ_EPuI.50tg)UV_P懬0(J׊?Ir?[^`+ECSnQXV7JnleyUͧ̚1ُ/ϳʼ5qStd]gi:e+2R'ǁor+}CΟObhkc|M0&Y3tj>xGCv:gb@1`Nq:Xc`bҹVpbg jYfU:,?8nEKK0"kY"M` ,ZZ @-Ue!u&F!r 4z>Fѣm@m`Ʉ Y@@ZJ>ri[cRa#Dt",ܩmTGCJ9hUY; ZDl ha[0 JƎmKR.1XSg>L+mۼ:\^|>Wz,- 9X5,wTeU$EIV 7yw?MO^\'oO!vGMdw9fn( 9-r͙A6ƒEYvDۛΠLlOC{!ѽ{B)e[&ZH̥RਵQO ;wrܾ. [gZ{8_ AH],^Hu.iqM2)YV߷zEsHjJ2`QGwTUU]lځO3m|j(uLJmh`VOZ{$ glgZh}pe*8Tu9NrhC (H> @'IfBYKH$Lk )׌h4)((B\|Dh`.*k3&H e0 Z]aT;[,-M#pޖ6<0niJVUq}pH#AcI„${CG; qVw > ݻ&6OSLd)B]|t&}rȾ4:7w|0*ܹ6LAw UgqV5}Uuj|ȍkSm~n58^Z2侯*$gߖݣfB`ٙߌ˃1 ¹>ƹ@-x'/jZFSi R&?U}Ŧ-±;FPS{u^bvpz[cߵ/8;T + sW0ꭱEGQ$ht !8m Sn!dWHT(6FU+|paӞ;v( J WXwL<㓜ە{$OrOz)ɈWfZۥ3\Ěԁ[ϊ;wW8oM} =:pcZJx$$%M px!Hؤs8c _Ծ5AWKxÅ੒*%X\i-OlPѢ"D4Y#sߪ@-q'R}4e[?88z -k`:CQc3:O;'3Fu'"y'y'55mH\᫷`ʁbUKP/IF P2b9ü:E*HgLxI VkB]D8RD1 BM TQվ܏g+ uǜ%U5d*!^V&DLqђT/t'׉'ee_XV1륆ّfͥItɩi{Ւ7s ~ @^!0B/%x]x|F8M==6%d%oXOHt6#P(WwW1upOsYP ϟ/Cb6c.lcHO _x?p=wU 4Qpr&g_54) l>QtsN!qtz &ΘgW,Ssrms7H|ER{np6y>ۅW3;?~ℴ$ekK\n鼭ތf*bacrEi3{6{u3UJַ:d[mc[[8ӑܰ@p$JU^ >cޔ XsS2z.P~Oo~|Sޝէo黷q9'p}vôuA{wu|?}ӢXެiM(9ܮ`Pr-Ps+f (c_{ck/̪6^l5'cvA6$So\-^}WFog>C1r!d<'13_'Fib*LɆCbFKSu<f&nC\t8rZ9Pe^hvV3PѼ$ɍAaZn;%^%h)92db2KqoP];>iTnYtYН A<"ž OP|f[d6"soA7g껪x=r9hQ"))9-9@mb oʔW`_Qd,Dz-RmLQDU,pwcR8 [19F;ǸRyQ0TJ )™M.ou(!ǫ-/i޺v(`< oZ6S cqI&pk<RH6 t `P;ӌqEB쒄UF%wѩ`D*Т{ܝ)\Ùblrt{h"?e9 W$")-$cR@>DE'gSPq{"9Hc,ȸd> (E.IfQ'f ^0?d$I \+:fHR*}=ޛ`nS'#*ׂ S(y.>XD8"s&QSEiUK߾2ZZAsҝktm}!QK7CI[4}ln-f=Ӝ!Y4ja-lTƵ,' 5֚[4!2RI:h~hٚ8lM;W:Sib' 6R)$S.R]yT1HT4]\9GK4#5rT^y,֝-}kM,C*!L2}0O)K85^0o OKF0CYQ(Jw$xKCb@D*$2 :%nU $0/muge,Ub-Tml^mGylW` 9f2~o|-6G)8`c"P5kSR2>P)ꛒ&ݿVzln8 Pv1jV[wVCksp*aL WJ]FEm4'9OGV(; ՞ a,%Qx^I.>D ZdB҅;$@(@伥NXRHQ}\O9@6fDY2(95W1! 
55C1p5*M~@< f%i ){P;0eŇ=i޳75~ҪۥhXCXrn[:HjoI֟G~rxkL#Zs&R#ovZ ?EDoOnz}pNw큜90"GBN 6y/C&^LDp]\6q4,\I5ۓ]@?)iMU Io`{W:C uQX&I٤Q)z{&+*4"-l#`ic~c;(N~7%ؒ߈c.ooæZ+m84 J>w^EMOS|rvj QQsIm%5c V2v55.>) ی+_ҏ܂ /EPljӋ%?6#|!|ƳgG~DG8D4:9RJ&F-Js=nr3$3Q31n^M8]\߮| *3wuj΅-~Q&Joٟ!;1%ȉ p;}e1YBN]%*:fG&FPC2rjw$NnK2yO&$ЄcuE|@>`q:qҠA ҩKug|ky)@Jd0h+h`N[ƍL3K($ZJS!*X{)yCNUur@"I}ut}C+^/xY/U 5gոE 5׿Zˣ{}7=:QQu~]}5s] =0U q)oxMɹ?x毚:ixזovr[ d|a}|TWyRs̎ǒq%EwU?oJ8(ovNUcd.WIOvf02 qsc_ ~9ځ}+=>~/ ǘUzCOޕq,2xmvˀG9A1 OkԒe"}CDرę.TW|~[_'A+0e)}}g=Z  d#B{0Ev ̇ 3nY!qB="'2 x/]dIwxzzƜpl+-xp1SPZ>!r@17Bv)"v;?zʎNd/?@N@bg[O_j8?Bկ^u⏯޼+ƽsԱ_\O*^,7To_DK}c WvgfngD"\L %P΃B>d*gz:0:gQmM2YϠewUz!M/A]7҃,9t˒e#6tͨ/oon7ϻkGw|RQP\M&Ѹ"D3sf(#XP1<[ED9ϥaRkjc7΀!q˨BDѯ)B۠$9;F/8Gcs?;)t 'E pd6p9ES)wrɗ8?5ٶfMYfk$Q>+qd=:F5LgIq1ÌN&`1$֭Ҹf{響1-c;c NQZEk3uyfWW|AϏom I$ 9i {ㄒ\@.Uh8T%ShB13Kf߰gwU^Fifmzn;xc^ْֆe'߸')hh"(F'".yD9)6zDž4>$b8F5Hr")8MvP8 C6BB ]W#o+X7϶7~Zd w9WLavBU0M!?=ˑ_\D%2;՘iPOY(4寿aa.LBr.lVAkooy}\zki^Ϯo<0IuY6Swmg-=a[SzS616Y TzjN%?цEkNݿ :E@x` CQ/Kr8(@̅}T!Um5#L_v|(}zz v?!&d5ˀeH7u15Z/GF5BN)YA'J{RT//ceSi'w\b~?YLRˤ#gLFW^U{lUvSr*g2wnOM}nhkF\?Bd8o\ҭ>"76 ԕW65-9>emuwӻ[Jl\{ík<76bf#/ݹFfWyk߁Q Vs!֧nxzЭ6o<\ҷPtΚ?u*Ʊ]K7;WlwC-a:K3Y8#Ίa3\䎒j&\nEBIJ/1&5Kuֶ#)?^oms>Lf"(@ɔQ)o^FQ?g̹SiK\J8s1$Un*D6OcOHƿL+M-ZZ#g%tyGB#M3ڀ\ɧ/|xl=~|t @ZW|Cv6-o|i,,r_dE◗qZIpf3;y83xPD$#2r'![i"9nsBHՑi-mgB EȆG#uJ*QǸ \(+H qfkRڱ\^`Y;b45n'zj:GRIj>ǵ^R ֘Q,]4tJHa$}_gw)Əf{~ TN%)n3R!qɍQrm$5(`D&ПzHE2\ԂOKHG5B kԴFnKd ,Vijx>gyn.۫?]Xv8vPJ`(Y~$HBxQNFW6@-Q2JI(AKQzc'`$=$5jU97Eq=ή^識 hUgN2)*|GZn DNշnZaRu?TŪ"NT\AU!7(Cl%'Eqi\Jᳫ֏W?}794 /ue!Va#4}AҘ~EF9C-/j Z:FGiR)p_I%k#;%4+U ̵Um ;mਵ4Ueu(k+vZ]Wi=ҢJ9kw56.m%J6J-1!q$ĉ.QgP PքD]y{Isi%>M ƈ Έb2 DouJP/#)11D u Ut۞ã'_uqFf+ -pI|K)k*@gj{8_!em8d9MĻX`Яp$sDYjٔ$pz12?zd(I1#Bqp5CQq\E<: Ͽ'\FIO?QoFlt򡮶Ns $GY~Mf>|^cѿ=8԰\ o^kꇛad:oF7ո SHAƉ`|5Zqf8]{uk˟V%4DGr9ؼ^\#JtǃEyr6Ow\N|iGPSb0/]=*u⣎tXfhht\>]״bˮM+1ʕ5G7k}Z=^kS2⃗WxE+='hp>­J6_y'{'/$cC-i=ؒT[:j7hǨZjmQdrt٪97*gPC}^:ڰdg2}ڣ<B-Љ6//{}q{/^Ο0g9(C"X`~y~ݛV͚F{˦whQ^/|sJP.|JzB;sko@Bx۷x6OO<߯zŖլ9O2QZ~b̏{\W:T)S!>fUs[9jaEpDZ%ǵ8ęV8aRrRRDަzI:o=~HmBFK"h/1NӣIgUD\6o0U% Lt22\W3}0Wcz.t)>kiTU-TL^-I$),K2et)$rqmÓ*<-U{\*rMQyd_<,-׊;ΡP*BSaF%s+VȨ) V#Tq@O6ZeRqRɖ"IHItLWiWⱕf Mc,;,|T,zE_Ài.fqQjoɇ+Gl $%@01@xeL܄d|KQZ&d~\ڼŠ"1 ę ^an2e]2܎YWtY:]b.,L𴗭ٌdPmAfq_ԶQP{zDF`Nњ [l P{X3X?"nQ6M c2>H!3$ E6!4\30RQE +mٌ=o+0nɣVq_Dtqi|>WB+SPq+)OiGAzXd%t6ַͫY1m$qVR+5D23!heY&!7$c?f\u ڦXg3-.օwx;T\jvh2>Y"x:dfFYoR¼s>fca3/xx+aIYY>;eGe=I'\ n ~t]Z''ݕH7EDz?ɝ`ZK0K 6Bzwa֙.zXXTJ6B7Bn}%!+K^`'b5sr*;)e2õ"1DV0{m NaqZbPݸݿz~ ܘ~r ) @}\yP<ΗJ X,rIϕva & &ЯT˻K0\kyQR2(xczlė^"Q« Z\iٳn5{Ҧ{0aYzz7޲_~^O xk.J[ E[ĸo>$7\u箓?[lȩZ| W Q=ː+db*Efy%"A&~0qzpZ\WN߰c/wGʬ&+ҬZ) ##/B0b(%'' 6+/4KM3&ƻ޾ʢ7}Td]?4l.|͋n7_ߐܱ#qY/c4|>>W0P e'U;Ω1fpv+sm]ٔl AQ&98 FRTT2&0حVȺ*w:_qvӛKqc;w3 BU:y"JQ`99 ұHnojё*tvD9O> mv*z] Bے=ùZxUfl~װ(wrrV]^{[;mzN19+v̲%wdzqc.eQ>;y0w1+4\^47Wa ~{5_xL|6ų+?{}^0c|#aKts-u>W8?äq]f~]Ӛ`d HV[1!vOĹ/'KϞ)۳o A_ebȾF\B^rΏc_|9,,g*5hOo菮s~E-d6h`Fs>,t{ycom]O/EKTZ~q׳ƫ{"IAѰ8<V޳S}30$jPBBWAg0lwnkLDՉrp|exunop` _I_vyxŋ`^:CVp'] |bBqe|$x,Dm9١&:/b;VҶfQQ;BYFY=&]?{4I>4eJ -6HF!A5`C#@bBrm B0X]A 1EêkJtxi4Ud5T3NC "luQ9yO>w|#zZ&vZ.xmE~ˁ'Di|7EjU ˡH͓5RcSIYβ5@8 3֨ia-":ie-vZ t@5L)B9KB !T)s qgk[+,j#+bVd133qau##Lr,{ & -bڧAa\ @ӑ%Fc ᵪ.g/&e.)P0Yg` DVN kN)[ +M1UsE 2| 1ׂZB|Ti=tLj`J/k!bxyUNu(јȠ/C"8&Նm&aHXb6E~ ˔Vg[d`_', Ptִ`[ժXTLd+bH2!٠:&dLpi  .+%C3#OEx/S3DW,8rf1 KG/Kv9@Js3\5xo]6ǐ0( 0AS@|, ĐZ +Hp@rLx*+tEK"gyk&+ٻ64W=yX$&yEZ&5"e;YSM,6%Ke[?pl6Uw9N5byt 0tb9_.J)V@gZ9 ` lX[ Ⱦ2I_~N .;f %ad_\=:{^vwE]% IK\Q$"/=kO(@H*+9JOQq@il,@x@HtvPXoX+1sՕb1N x3V; ]Ѻ^Ko3fU5 KmC8q"^dCia;3lۙ`{&![TP QȷZ4.dj#k!0ƋA G^*tdnIKDBKJ7AyA3 vXmATWcA#>(|"H&jZV"!@T. U U:?my EduV9TX] XצXkp`(c#P. 
6pi/pC*f7]H_GW1* Ge5&0!Z%tMކFL3uz5|< hwX@+f@cb- FyN ВHYcvXT$fx,k$`?Aj؁0"ep% ~X00",0wϢbt",U@k6֜cy!ڳ0M`QIRhW jTrVYAދ,:i,W]n$Lzt`_5iIު0!K֭hL6b1yڭN0:-gCyޞ:˴b^94fl/`ڢ+Z`8FL-FTa?;5,Fhkzr8JV  Lyqw3wm A x XRcF5Jt#BK6\D[5(QR(.*PP<* GTi'O H@vX ׷52[66I#+dOy xrpL9v%$ ^1 `\ة %p #EiFF|Rz 4~ aQ0'#80`ci FjmRcuz@pkO7Zo;ӒWMIB1X>ݳFrsP*m mMg TfD{p$k 6PX5rfF8ae%3#f@D2.Lčr$Kxޕ{87j!$rUj.u/mdHhRFCdN]XwQ@-0 U&y]p]ꊄީEF8Ø<`j?nz?z/n\,V;N|;x ;( ڼS9;*UB z@_oJ!{` N*_kDD&OLT!PPRV9@jH F kWJoH 5*a@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJX d9sR` sQٶ+,7LkTm@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJW 9)`'xQ7%(\>y%@_{5R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)Z%gC@`/j.YSW5+5kTH DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@z@;qRE[մz^_v7o6߾vRr(-.竇@A-^6Doly>(8ny.N"/wtf>\\F:Kߙ!(6577YjNķxckOCHg?al3JߒwhoبZAOv+Iw/oYuz.VWb\tXg Pk-'[NH$MAGD#~zT 7-2h1С6\3쎮k44g)wc6eօo.G$uY]um ,tp̸A ؊[~I^"[YuD9ngw\~9K x^坻ײ}=m7ot19 ^A/rLJ=RUr bāDA2mIax=U4|¯ig6}n0l DNX>=b|\3@x]yd;7~͏uu x7?ڳw]u'}NͶ7ڈ CkRn(G+y[Zy_~},K]ht%Y*!ȫo9,ū?1٭a_ޔڽQ5LI/к-VNPIT˨lS,~֊2՝߼5b '}Cyb߾p* &5(OM K gq-_}I8}Q?\eK犭j *E [ۖqi&(m0{*uď̹?"6P '7#n7{l쀯!7'{.WO I1@N+FKR瑕qZ h✜6*R=#9wG^b}׺zIT^|ˍ }[5;?ͽAEvЭ.}^mWNpY,?J^9H/gj wѰ>ip:*U~x_SעBw(\A%懨htlqD06u^\q OB^Cp6lyc6 5aPE1 cS0EF+\;v,Odࡶ_;9YYEDms"*ᜨRm) t"؅6jc<8c7svp0e?i~7|\V?0PeӖہw,S*h%YV$W N{3\Jk8cv3@-/ہ`i"E[:r$3Nj? l>F>uub6JZj}oa wj<=ovǝ\,\{b>|i0Zxα\Q,Rng%DeUkfq&e͜3c?-vB9ʅ5n-jm[]6dI%ƧC3=eDʈ<4:BZZTV:ޅL%DbM.ճx)Fnj͜{HbO :"/.Mp=n^BDǼ(/R^<^^IƊNi[Ce\jD_A)YY>)/~Ta7xh>|X>)lZ8;g2sV(7ϵQpi/WQߎ8ZywNav_WEe yy{IKst,-ťӒ^ E*n4j/R>]b軏Ӯϫ. 4Je.޼<}hnyqv?|w<8TQĠ$˃Z ";I=@uU֒>{^4FKCu 2)RP 4pHF^}4F~0,];>1.+~X_ >:SCI(eF{_3qqf*v;wBHV Z?8Cb0Β֚;tD `2?R-ĢV2RUJ(K!L1UJkA_jW+?-nf粤sZ?>)m(FhmLAD$`K5Ga]F yInzOޢ;z7K8k5x?9|}y" ]:V"M2.܏zgZo޻GVy~zv!W^u.)LGF7޵$2g~?Ip3 cq(J= )CRPc驮U4z8jU^EUTQ '$lP\@QsPʛ,a#Dt",:,eRWF\z@Õzy{/?qBhI|z}Od*u*3UYr}vc1js<7EPH̖4iyO95逼hi8+8NU*Xb5MN4J6B #:̌;"1L nk8/=HRIpxaR[E*$tZ. +*tEΞi|s*B urp [)4$))SAQ< yQ=7d3zH3k,o`NCe/>]@wY>V|YYO͗{lgQ.Ms[v/d(iqx_Vz|Z_/e/޽Hxh t2ە kz :& Y!|4@7?T'@T x2/gfk?37JPEMiB4LlE{x EVŒ,z7Ñb BE `'„"`<LQk;zF:5 H 9(4?] `Л g.9 R_Ob.g8~*p X<.RįJ{F\5p] e&eTŤk_9Kg7?5)fԪΊy5ݳ廮X{CQ, %},Eutw4OhXI{?I t Ḩ-T?ⳅ)v&*(hz.M/UW鯈^o i;U5`}lʁ:Ka#ڰ XS}y!!J[Wˉb^ "0eLb)QP6h"\lUD NqAAب;EHXG"\JlB 75FҎYRʃDm8z|Q;#gdþIF]$.bЫ4&1*A4:L-” FJw%M _Pbfjj58. 
åh2BUD+dG(o"XNtZTJSi1=VxQj%- 9B"4DY8xk!HEnh_c'f=kk+.׭6\[H#y4VF܁Q *9H^3fenOn~;R?7~ro)< 5uG(B [T`0i ZcJȬ6arCScH+7/ Y~~ΛvۡÎtۈ<>6qg-HRhot 4bNH33L`wD8DH\)  E0#=8vh @9.TSnh@RK$ UԻ c{[o{ Y=-'ej?:\Z*6Ɏ1j^G;Ă'jji W1h(i(Zi(PPܥ _Nj>_}_朣?~W^=͸~8Kr2ep|+3&,gh˗".y:Qg{,qWu<,=kp춦O??~z.[ lia/W=Ժ,j8h->(:Zn9eiJ$A` @D8T!mCDУ$ctB.˚W B{I.um^B4 # !p~ %_ ( VpM<ф`c ;1PYE1|mAp+ǽD$# F@6:aP>FwK9g,eGij рcLeZC"+3*kt>!G<W7H7 דft(`9rٽv/aWo.a-6g!S;c @`MUspm L9ޓ6 0 ?Km[7<2Iu!PխvΠo=2E&\Ց2P]R-uqxttxXq˪/H|R 쌴uW.Ɵ?_uQV y)kaʊhOzЗ9Kg?0h1WEIc`=aE:-N 8 *@ S띱Vc&1}ED&Z)+]|{4팜j5>D < !.S* -"e א] %/_!p&ۋ엛EX-=OCtRoQ)fg5T ϮHT3 Kqfz9 yPL%9H, IrBhZ](*6?G m;)EVO7z2Ζ* s>aexuwyCޕǐNn '\yb x/RWo VxaCEIĿӑ~P~3֐ YiY:a=%KPf~gU{ŋHzgugɰMת=HL7픬TŢ-;pYZYrsKg+uJwֈJZo˧&5-)Ȁ.&mܥyպC~ټ19n{=/wr;?&-5Wwyk߃A r)֫[wt\mt+n#o<̛ liovؾmcOO`wCazK3Ytni*`YXۘMϑcb&úLlO=kt2Lo{Üm$׿ȹ%׃9."^bR9۠LnLqXǸ'S'S'œ'lTVDMޕ6r$ٿBXWdf4Xx=c6v1 !O):Z^!Q"E5UjQ6*V^ƑoAX [' Ic[Jfusݺ',ꙻbo;My-'s~fbB-gY;;'|NJ bg{ZiTPtHhoHQ &X+>P*H2䧲-2yq @FI//,}RZ!3A*e#I-RMdώ⿸,FZC h6A[4hk *gݩU__  &2)Cy 'g 1zHTv6f%T`w_ kZ>w~-k^9-:%u&Q1M'rSLlMȸr7- Y/08[|?l.򘥎A1 sPƒ2˕S"h 4"9RDfhQzs2NtSqn6NGiXzԟ$4wA/4-#^L5J zk-g8#P%d!x֥ܵhKeջȳySܗH"ZTZ B2F Q1i"uHW A3ޣ᨜pY@P)O3 p+#0#}F>*ࣣYmDʦ q)?2ZJ.þL҉\O6k[͒S$Y9.0K iuw4ݍW4!p8)Jós/fǑ)\o.wZC2%n_:8;9]r7\)Dcg~x]VƇPe47lџ6+߷=?;}&8MZjO0Gӹx=76HHԆ?{op14|[גk[Ru͈ _MayGpڨrTpp5u}C\۪`:V׍plhuR\GJ@8GO~^ >7߱cP j_{zL_~|wݛW}~o{wȅ=|7V P\0`c:yD47?=iijo޴M#rԳ .H״B}ZZBU )~:0O#PQ-׫&~]\c/B&5?irt#^v^|$&tv2Ǹ#mtlII|/Γ2qAWF*a sH:gVoY/`cʓ:p՞?wqaY\FwVjX)ʖdB$p (Vy4 Dt٩C;]"jg[Η5v=ͳVН фIS<7Zz|qEJ^M i5d?lc LdhU{~i,$=2˪x38xiꔜ>/].2j ,d$I|SCJAt`)O._ǤٮX ,ݟ϶_Rw|4]E{әGK79D(Zƌ \0V"8o.AɠlBW[.;.AOJa0+`Aag^ݖ[8Ҽ6__3f-HjоATj<F)og}rO2r{`|)1` jሚ?`)2TμdmT4N}?~ϯ_Ȧoލ]{1hQR9'b&i .G%r7&rKT~??f 8\KiHwWhttQͺ-}pEM~xa/Os[wG3]IFciH^Fe cgƜPAK@ry{$ѓ^wנK4q| bozS{!3$B(QK-~3vw.m(bOb}c[Ya )WїF/>ؔm/zB55kf-֬ŚXkbZY5f-֬*5kf-֬ŚXkbZY5kf-֬ŚXkbZY5kf-֬ŚXkb ]k,$f-֬ŚXkbZY5kf-֬ŚX_+~* 3_IAtp~N Fq6jTy&8f<;hA"Ԯ%Z0؝D<C[(" =0hWZdBZIمANwM" 4dacJ+.} {J%.2M2GNxʡt^/tǻߝu\^+kޅ+.tqS -\ ]^,ɍysj?qrП}[ţD0#1Ds9,$тF ZEU.8;ʬh sQ0齐h2q;f17 u)p;7l~47(XΤc[m;ڶZ` 7TxRb1DgRces4&+Ƃ | Fթ=$e2.x \C&d ![@$BKc*d!@:vl;f{ؒ1T ΄v%ZD"b"'I(<cp4,U(GgW4]Eu[*66q%9撃}*餹Ɇ@LZP/*;-qW.Nr^ggR],ѡ],c.Vxģ@fr)9I‰ 9Vle:)%3s.$^S]IǶPvl}VDb?~jrN5A%v y[ Lя 6r(v4 |[KXI\85b"զc[P %r Ҹx.;8Gƹg)Ĥ- Z 9X!+aTrz:!qr6!gf6AKGJ:l8KpʍFgy +k/V`՜bWTfLR(ēyMOF4*4fAT 9trRJʍ ,o1.'+ٿmM q農h^ҚS-&}ۋW?}7? G59=^_}Zr5Gv_>CdgCjt\owP@ w0^=SP=qys=:ľyL00 gYڱFJuZʊ᪓`7ꘫcv)`sƔ,Xd6hAi O l[e`p/U:[.&.K;$- `Ed, \d9]cf_Sԕ\OVVr~抗{MLBONg{d T<8N?8?=ӄZKPh rlBl3 %Q m:z2Xzzkf?ٴN;X dB`.C' ӗ| (]|z |1&Zu7{?졅$޵q$28`眡A'k=#H. ߯z|ICQHm5驮鮮jڰ1h(92rP0Zs/w ΋ |KPrRת9mY M5c%m6DHlMU֖ `r#knd8dl&W0Fbn$xȡtOfМ25KP2M8njko'0̶Tr<]vzoF淊2JrH+m@t4gwawO4VۘE#yΤBRAD|Xm;\r%׺@5lDH7 &h"`ACT 4Xd4cB)7j<[֑aY,-!-Jl%#RHDcG:cv>Jrڀ\r8{/^XW{b>)@Ըg?Uhә;Ju9~in-Ź$jq$Uȧ<CLH, M A^\Q9^]7\;xbqRI,Q\`J<4IAwJyszPԤ'[|ηL|?ᳬd3zrY>^404)d˞\''wY>ۿ?-,̌ƓOb2β #w5e'qV Cd8]“aٳlⓧOh=.=y$qI䤹>5'}Z*G,׼88xϗySGU 36MG+]k>I/-.qVIRsRΎ=A2l2AL8|Rƪeͳ yl¿Õ^lGɊWRi,d55?k6nȅ*zS(?7aV̸]*E F7 VfۊrtV6'Y J4b7RT>H\Rd"zXrKaBB7A3AQA(…~:5? H 09*4.rPw0{gs';-ͼis"X1H@6 W%?z>,]Wz[\~5G'6]kN{a0Q.|T%_u˟޾dhobpfYq~9+&{JVZUǒp"G@sO`4_XX@A$vO}ҨO0~Gs SncCnLa Լdՠҷ)Gs&V?ti ~Ulp*L椷!Q$e ,"*"V' 8𠍊` sf Lԝl$\.%6!`BzQc |/1@[/5^#u22u7}#H\`Wi9 MbTc )itZ)PL&-|IB'Z<Н6ۏ DMR",1hMR*#ڄ NagHPJGrR \1x ɬ)*-s9n;kD`UD{TNsB-1Ie4½h$˵b(#qDG4)[A(DžVj Hj*y nNnQ{ԥ @|(;D%`of0>=VQ;G)L]OF'xRxieF:&^D0]…GrQ<(nx:;Ӫzk<``Wa E p{9քx|FPbb0&% yyvI+%Q[% )";csʱ%*9bYnזvy?G9}/wjgV]O'psUJzޛ |bѱ%̛Ux~ώ3eR,F\8+.y>Ml2a1? 
F aZES  wȂD )[zAp@7K P'֛ -K]̮D<8sWy2s"6qpX翞ϳXg3_ɻr\U k08-O0#N( -ЅG>L(8DY~0)Tt&lT0뿛 .q?gF1,{GV"@} =O䨼Z,y[8H%Bԍ:d?w~d$^^|v|)%is&y 2>WWy`l0g?\Nn"Ƿ<}&EˣC?i_' J1f/tV*.\/%6g/JZ'8`FF p6hKI~>>"n~%_e뚑M:hhg.a fA"kcX*5W9֮[b`ƍO"QvK窻/m`2|A˫u{fΦbmirmqN oM1):SaZIO[kHYv]U_ZMLt}XunKq6O zCtlj#yyNiv`o\YRVOs~6X̵Q*g er+g#%Ňp=7%Nqwג)E>i ?Iiq7ĂۦЮ0f]u>'2_raշ~՝ʛ{3eQԒ U(:l|(WzY64r5jԲx1ɪݛVPaZ?iΫ{ZEH[x7OHGeЧ fQۃm#"lΈrHceג rsͳ/in\+vgd7j;A9C"|$O%Zq!eA+r͉C:ҩϵ!eQw"2IgZx8_؁3T<{w71qV~J\S$CHQȦmIDN7{jzA S#9ILJ\*R@U ;-Q1䄥 ^*m4U aU8W QHkuj9t/+ rvvTcp10 SO٘1!e뚋}]/wi`yri Q # UΩ5Q ɢ1 DtJ_+/堂)j4V _ Y>} Ùz(wMg( 7gd{lSL g:]VTcq ))_I^-=B4JK7ߪ$-A㫒՞Mk6*S9xPTЅe/޼ʪL-qK*b}olE+Sy `qVȿ]>]Su8}Jͦ/ISW2k̸Hl %Op8X{{(U X3fenO HVTbWvzz*L\<[O-CSށ+TK9䄙>IEba L%4&c5%]1d2\3>\^Mqz8q~ -|̇1f Y  :li)1гm?$\KSz-zTRIeFRî8NEEϑ l>]йߡsֹY $&\•HI%B  :ص$U;:!_F`C{'M>\=˾dC֕;*t~uG)CR+>U6?^~TI.Ѱ/ͭ8DPp$||Ǚ{8,!x N!-#V`-9BHD,2ɩL d*ɷH;[B7z@[0 ^2l-,Ur@Qg9ObTe¾N7ePu2:?CU6 fW^t뗕T ^^38aN&,/2 |̆6v0QBLV]ʫs\qXtz0#?,B͛p>b|qz'3-SIc">?iT݅;9mqXx ,a<̙C:4xGCδTþL (L؃^'=ن0GZK";^2]ؿGyˉ7M2)A'qR#219c.,Z4|l-q>}ͶRU*Xb5(&h(oYwREb3FQd4EU[CQ=1rHRFj*e0F  0)&8'e[ikll{Ž@X+AVzk0%F ;pӲ6ˆO=6~ӣO͵p\֔*i/ V Y^r<&{nFځ՟Ε9 +iuv0wIfbd\8d!Y]A;!3X]%$1S)kڲE(2ݸ@/L?Xl c^'Kw>XA h ]H Uc8.RA~:g~V̸O{N9EDs4Hlw2זqn曚CŒF^WȎhuSa":XpKR EuNyA70EmSLwB_u[p " BTtCL8٘zq( I,gФZ#ٍ>+e(Y6kKt[RLE=| leR8P1O;omp޽˷&8||}VNtRrMbl+Vʭhe^k֒#sEoJaz}:1{߁ItA!Iا-<3 SܮcKnL! B%ˇU+}xRx,_ȺRѾSrYӹ+~:|Bc)QP6h"\lUD #ƁmTe3rKK/*pv) }DF]$.bЫ4&1*ԆTԥd ZSEBFV2oY)i8.]gjҽ58. åh2BUD*%dG(o"X{nQyuI\>{ ԎK[rD5&i&ijq2CH~&|r21 h+m8F;}!7%l?Xcxn(x ;A{V3A㪸wƮ_KI>פ#Z,)nD{TNsB-1IeB*d^"NZƕ;W 7D9Hi&It*iSa'Cavsvm")+{_{n_Zf>h(PXXGCLC?|Nd8)yۛo{_tئ{uC`!8REdGp"3~a[[|deU-7AyLp4Vz%  L_H8{ъҳ,G2k~fn$6m^ i@dKJ[B<- %&߸hi<dk>dr{5ȹ2b Ɲ%NdXT ~6pTzbuyu(\*Vx0'Q !cg lzgƅo40P_Ufn)ƅg3t~0M.M I셾/fv-Q`x1͖ cd/9(?:Tʓ2S,CcZs";N{>;<,ڠ$́9|$A{#$185wz1j45LLPIX BGn1<ޓ6 0.jipN^w|d7h"LPv%e0[i,*6QE9DRjÍua9ĝy]Ŝ1 цcoݹufLɓ6o xΜB̚JIe7w lg/-'5`8Src!()'29ǐʘ+ (rDuPIlȴle&Q!ereZ30S=P=6MAX!uG#i"8[`HNʳ@:Ny"O,2^\І"-uؐH~uuʎܠg{7]g%ZNr\.ʲ/b|P]A%,u>mNyIi7NW1LȉB1.sLs[#Y'&;Y<އXC@#3vX㬞T6WdnaSE`'t/H0BPN?33\_Uݠ}C2z %UOGB5BR$+m'+ӲK.*p@d鶓RD;od|5Z )n?TŢk K[ xTuVݚep-K<\dm9_;]#r#ih.nLi _eksrzUm;J\;+'Ⱋwey6|znpagz_{l"Y!ۓ,02I0bՒc߷xt,Lr4[ֹ}yt.gb}~/*޺q$"/O}lƱ:Ɗ*7`w[Hjok,m/di}'}=1 I{{\(J_]QsNKyzl>ۧNC׳m ºe2Ze!,Պ?@p RԱ8)+\jZ;Ӻ &#ʦym<רo BSyywj@ʽd4Zs|}|۫_&&ӛu}cץu:{mv,j=t3n$]A_,mF{tT/9T]{QHH^/j\PN0 芿W tlBHKK`HkYRجj=`бdJ9 DEG k_i1` '?btYfR.5Vuj_A+%u4E#cT nR@>'Q+ii.cT" 9 +G5RR~a[_$B}w{t? (D@ ?7A, ;ecR "v:iEعH$W,W_4|94GW\#VLGS[^P΃0ULS6&rFU@B::K KE$T0PNC98Y*%h !8:x]`ty JU ג,Dҍ\<[Jm":QZڻڶ .k- 2#O0&"eaĥ+m ŵi6e wOx\bR22*@d&X,f_GzKaK՛B:W}ft TPRHRY !,O'kOűogD)D G!rY h.o{;"] V*ʓC4VS_$wdJ]uנw/lԚ:axvuG3(PѮox8-$_R=hhgWٛ1?c/g_N+I_vmH&6œ7~ZqU m$cqH Cr0w\N^JN'O˅ݎy8|mn{֪cըjK#Lbu;-j >'r4[c|S@M /]LG/??ۏ?ԟ?}o᧷Ro۟~w \XM'OO b|ߏ -14ɖOX"Ȗe Cd+q P$b6s\^l5Y+I%۽\=qq^Y1ԋb? Ь񠎴v=I?$ 2 4}0㥶2h*1R|ޘ X/qgn'578,E^sc4:yrm'w`lD@UQXRyMVU;FW:M:GAM[ŕ|!η]#y6!teBwH\:DOY-zg- -Q*dHc:`W'EaAH;XoI_xw<0W? ;uyJ .f\@/MSY$LγRJ_]t&u_WS[j XRkʖYjo8 Җ3E1 0J%`_BN!(zZfI)&͍ 2*N+h¬7<]_˱jJ_w3K-hRvQ=\84:NsDEq!b }+䐑0Y3iZЌr  q5"i6s` OZW/]ӻZȖ6'6׼\*.z.zdCY2=m^ _J*)(b1ڈ9ԑ%Fڇ=ThH2 AzG uNTacͦWk?^o+wt+9iGt>] bbYwh̫ =Jot]":\]rB5ԌBqz()Oe^JBdWyBA VHzI6y*0Em5QUE$:WӏkJ8߂ύfg|>9e3J#Jj .#)I%T6#Rf` .7bQY_xg oh87dF]kRZ;h8gnge[E˪\(/QNA[-R[2%;C * TSۊ#Wb_>vk.37z bLp}>?QϘxOt5I`}T IHD)C ysM,Zrp$%dy3h2aS Yr%{iR xfoͦ7o)7^m9ƫcCQu^|@4,QƃyMϦU_ugaкDԝw%k^vY( + U`Tlv[IUjk^CF[tchq$HZ{%2t)5kQeUDBm9k~|GOƩ!Ӻ yir;2_͋>/wdK2~Svځcٹ}OzΪNx?O*$2z[;ޢIsR;X,]#vԳ Q`i- :}nQX-!%:mMVF O;`:D$ʤ$!Pt2h`+{k'J"#AT>c-UֶqT-- s@8(맔2fIRK0kGnFgW}@ /kVUnhT)W[hkݙ&OpGűwt$-o_-9Mn/^2H{&Pd*A-+ިHx !8[DxRXj}! 
[binary gzip-compressed archive data omitted: this span is raw compressed bytes rendered as text and carries no recoverable textual content]
;^,B#PȇGP lH-%TH,PTcga#Sg4$F4rmn%o\':]պWClwpӇi^d1f Ou{v8B!84(#915qpeB*5\cwӖ c4|^<F/'W=Adp qX57 ^<GP4eJSJ vXI'PkD& `cQ@ 0p3S d:x4qHT:ݓ;sOkHq8~8^`;8n^4YKu"9 sm_[H)>xU1$Ʊ3bcV!%3%kQv8;5h !B u$肬\s'\ނrdK,Q䫑†rCLT(ñp*D@/SUҺf|DdV)˓_A &NŌf#d?e u8>ailA & y 2GH%.VTcEAtċ@A:_jp?!5%kQXa!JClǝĥ.``EQי8 -S$r|dܠ@E^Gpҁ93EΔ:@Hd OЍ|n#tbn+)@ڤrҋ #19 әhEqhI,@Hk.T/'ph3mxq1IdMrT##Lpx=d2Ӗ1>Ɠ+gX[Ž{8ƍe>lƓ͓ΠD95Y Dϗ~:ذi)YmB ~x1򾖏3X 3 qT]e@]P( ! W29q@EeE4*^z_VB< qQj l!K_8wUʂKCdl8xg@pUC2~ CЦ"t ԅ[igUl4ʨ aM@bĊq/:?,qZdIyUHDETtUV"oFPg%Z,D)W$~ DY \f gѷ2*bޮq;J|zQBy-IDbM.!N6!?4 ro|GJKz}'*PCuN 88ax |o; /_ >b zOGf/sƂfwptۨ&OfY:lSs:J֥x:PD't@ ;]J6+*|GVʕ/YF 3xK,;Zx)n"{pAE -NW AUVUA5€bӕnT\aTg*^UwõpҩBW|ЦHvЦN%D٫hE'&_$ Rg~apJSyqBҞvPND5kr 1څz+|cטg=3V8^؍ Rxs@!ҡ Tƻos7AÊzӺO4T 歯ysuDN5˙X fSSSxg2cp!HK@a#;%xAA토Ww4m-@0zw|mLO$ϜZvoreOg-w`:)4T%NPb u'W6gbDeiQr,pV\Xl08 :F8QHAGSiLl/wT?E#_?Eb|qH52T2sls3!'&FXVY ɬ2H,OV9Gؙ#B,xHu݅j t@ r:@h#R`P篒zzQ^C3DEJ seZtY%˕hUF+O_32*b4xASŝ`0$isY # ~78dIf{:yyT0Ycm~-τek,X; RC,͙IJx:rcj1Pl$r2KWCJSfOan||X!Y{cW^pO/$iuEg[bbAtkyy{Z|~'/0qjVrm_tn‘di tmo^`d+xR0- b8FS!P>XuC DD!px !~oq<ɽ..a%lOi, i:9dTIg)q: r(K^H\K̵й`R: mLL<ՂRKk4LBj@ZM"ځ:m>)p{jƘ"|E&|%բ\(^ٯBE+JDTlIqIIj9QXsj)1)8R慄%AD)SkT̐6vE GNAE$d)q%Pdo]x Qٓҗ9pHd {f vjȡ,F)=c?xJ%kAmӮr` pGU[[iG5bj Z)h,[Ububp f >junīpc^uEE8"]8 FU/xaLP]? V@5+\&Y7a,xIs?ŽpaFqW/dLanRT2I9CIZ;zOvuHaJH5w$Qcq*0(c`J b  2rKhj&6H9S/21bJjʳV qY֠T1ҰyCM,d-QRj76+|z8cVZ$P#H)f2ÆBY9~HiZ)$d lM y^ڈzXt=$YA|x֎B=a嬶o$0߿F AAV€8ڭkXl;m<0-KTX[_d9gHuri %^\p.]y~C7^a c0֗@ 9?ME8R&Wά~Oo^O' e\uAu~Wu5\4JJxg@8rUnzY={e_DѡO ;[ӣMW 5mVx}7WOtqst=\j/z}GD}jDyq)Ď8d6wr(31 #1 bFq̼Z*&,2FKPbES˙24RHJJ2J P2X<88x|݅j"@c=OX̮bӫC<),V)?j]csΠ0J޿ !eLiJ)r,H wPD& D&9JPEO%y!SA2zʣd;\j<Fw~_N"ic-! {!_u5h B$[  ;dN#Sȟ2GSfq4M3(i6Y TjZqF`DL:]"BL-݅jYȇ ZyCRP.R@@W /I[I??9?exi/w"ۉ^b>=^=_{Obl<[@}-+h0NsT0EBKg]/}1 yz_+E3'EmABW!N|aAT Xr[9x'/68#h{KdC[iT% {_G{ﶎCtpdKnDb?wo}$H,xWXb3r܂뺷o矬R w)އE^݃{O Iv*@DE_8 }L&}uVps.h]CFsgǜWβTRw0q1OyA'b2a )Y)K{\wU e\pW%S.7S „=&BPw>e.&hÒGﱏ$(8: dnMoȗŚ=EhT+{wfK,6{t{(&Że@vmkdut"/UfAEvX-NCS9<<$dPFX8J䄌(7GaL<>`BsH JPb62RĽn>#[o4}_n1Hg!U,.ctvwaH?H%0s<$ÉObf]0jNE2.MP [?D!,(c$<|W%qqUmS/o%ԱS v:t?0zc8׽ߩA%|h!%=Qtljؔuob-Ȱ%X2h&lDBjjci:ZXI]a ηRP,F.*WsghK{G ,SM4Gnf rj a.m f.EV W\KrA]z}&?>䫚FXMYQH,r5HEJؽF$2t5auiP9HW ҢޣH3Wl@F*?i>EڙXyPZ/ٸMS-F_2JFZ_368 T'4Q&=3>\\Fy9A=q(^Gz)sxdĨZºlIص?itT@|7kIyg)6Vr`P3P3YHŋo੬eDMWSQKaMzvĘ 2B)]-Ɨܹ:u I|GRR3!.ʝ9t*rEQ bW M4^ox bbްBom`i;-ch;"u15{!!W~qD@ P)({aLOhXƼ9dt|^giRs^; :Gb_|x i`%ڿ?]ꗣeNTn{0H~4jGt%YۃwL>8Ͷl{6 ډ=ӿۇX4;[bpk}&rnAC pc_}l qKꭦ_Zt7qͺr,Qux\?:\^5YFj zBo^r;~OOO.|o^k@g?j4[6d/ӻ7&7 |'SѯӠ6ޞ6i35(:5{G6֔uy3r9>;FR}hy1֌zx>mpEK=v[Q^Z>z\@I^sByV"TheLDQe 7i /,/$]ζIQXV8-vA]P3 A:i}!2)6!Sd6DiN6#B'fǖ_t&|?wr7Y n_ N#Uo}.`k }n[BKW-*g%=.S>"cpEcFQJFɧ|%SH aqnn̥fiRlܰj虝K ?{b0W۽NZ^ ^73{;BЉu5}6‹ nCZqN5jvҺx>`v9|Y7twk9}Sa^ѣ_^C摩bF7_͟(۞u:ZIXA{ZҺW#h AVd4j.9kakE+@lz@snL<c$rG+ɍ&t)p Xlsks)Ǹ[׮RĚ׏,knS.fC#ب)E;a\[Eamܚ^И .؝o,7%܂%ߕatUs; i;eb{p$ozR.ёi:+GBwޫp0 gVM;}E4J2-єbBs=ŅʕР(wDPY-TV BePB%_UvSPYKS uZxrH pG;I(pU^L.f,m+5:aE:2.2>rZ,O);b1SJP\oi$C즅;4N=}ǚrSwھvNO3MxNk`bl~7/lwg tOfWٺ]jn8{0N#Ek8ԭ/sG.=9{YbwY3ǹy'oFaA3b?ۡ0Y\S+3,1"ԾaO1c}C 3HqƏ!ܗYfRQ A}38g=JI|̧FmZRdu>(H&ϻ&s 0sR_)Ffq-ZzY*aM<u٣\ $/̛MmK/[;'í١mӾ8{3Sƣss8'_fW;lA͇CZt˻97ôA/tQ;(ysoTnҳ?"(тL=Q]S-LŤy`d'O 6ŭK1%a.h z3~RmΪ8iݤ{Ԛ,٭0!^ BN͌xlz)".%.X(Y:mxB3+xV̺xαv26{\2 ^P^*chuUܦ1_B*ؘ_yF'{у~%s _D(R2,/:Y7nc5fmB I ;x2:1':a}SZd@W9J  9W'$-xmzfW=5FfI 9vuF6 X+c>W1fLLd0ko[bŜqMclpx6 sg<"Ø.]si~n-El9\v\Dv9Y?P2-djnrAsAQl3d%.|W>^hވ1;[xh0„-~Hj↡˰dy)5P㊐M6iİBGgڡS dZxb˄)0 CJ_ _GRcCuW XRE*A]mo#+Z[^ l$o˳f_rbKeewղ4`^l]T*:xpmA/d[^ !gUVGhqJiO? 
_Q*MگacQ13ќo_ &jr"9bzo:̆3SrV29ڃݥUJi#ԷW7j%57ȿ/'^:IMSh8\}+զI`ɔj]ON#?r|7Ϝߥ6s*~&LP?.8z_Cͤ9ar$:rLB~Oia@DOf,c+wY8\M0#uNyO}IQK\}K VW}\]O5xAyl١1%NX8T@iO㮾cn_cʡ M5bP.w\07gץ:[,fq?-ܨȿ?cjLjb[b̦eՖg|xo/&C WhG|Q7x ŧ/AAC+v@Pԣ\,N01۪&:{hjC'#d f^./iMϳ]9i0|qnCZ% KD};y-/zŏ*6 %>YGlZ~:Z~Gh#S2n}!rK*#itP6y3Jt[բYⓦb8W讯?:{}Yt#}d[VxΡ=_y4{rYO5qxn]]ԫ&9FGTim6kUYWf]uZZhAC%GO6{{ ΙTcIQIh5Wr "fYA)as|/`rF-zWK\.qUjӒ}!I3k6ίzި e{ih YRp-@f2"I2 HB9Au0'!xÐ#ck>#̭j~eY4hdMdRe`)ó [$b)cbNztE'U-(% ǘC~8(#G(Q,lv:P߼ N?.%JvУ*_{-)yFx]L#ݶw(0h^.b-3ɗ$^y%4J$X邏 !Hړ[. NJ! ٠!) ER j66Dvb'W9P48H9Ff]Flsfl595Պɓ3R{I-A΍}U7w/V6PNPz=mjB6JqE]re3ղ'P`*]Gdd 晔F8;76| ;Ab6 qNu3qE'.+FƗCVI`V::;訁?!]2_RM%۠G J9EQ%I2g:=bbX՜UdN} Du,m+6WS2r,{Hrm~03׺ {q#P8q=xk$լG`OYS$l`A)uЌSV6eXg6XvNi Z:qvʱ'щcU CmW@Uco۠*힦TFV>75(N6BQ,3 dv!eY`LGS:fZOyJls1W;$W|6!x. kPbJ;)Y8wg"yg2[AƌH^<~0 e?vKdq3ub%BMI FdSHYt,4bmu }5>ײq&U>uh_,S`ڮh:(QAo:V8ף[Amvp0-=8!^V`A1wj ZeToGuP"ʊ{z~wq&,)GVۻ⒐b){)f6a0u6HNq4Z&0 p`smag1*vߓC<~mPrDcnu4HFKH )8The]t\G77[RSca>|,LoqӇkW4W_|ytf KmO6:ojsb#'fDŽ.2OFyŔy֒.b_=Ĵɗ<զր۫\q}em)T^|+mMʽIRnj3/xN+ tyTsGQ W*˴oC]ݾruuUNTꦝՁO^%Zé礬XJjrAVAXe]KI5UtU}L/UC+f*YUc=jfc9٧PéUe';?_%ܥ6ͻ͔8i#,ŪU`>SIm~V9:>Q-OK+?͖gv mRz/w2+nn==eTeYZ֞R(k|%fũA-=bfȥ5>b śS}Y}N14> (0bl \{7Kb65;`τ2 12s;cM34ʹ]+HZ97:oԑĈϨcĸ+dzs{4s/+[qװZt9p 7r^6 ~hɞ۠3gw2-g a /N@xwaj ?S r9ꊿ5zLwVIr~qf .ˉ,F\U`V{c?%8pجΝ]Nk:ƅ׊?!ը]9w1TdW7{5_.mUuY 9>ߝDQ$x Dw=HB[PAUʕBhLXQUtyƋbdVvVSP1GJ*Q RXGI=tͥ 1<гrANc (v Nǻ#` xlm;y-8yrFZګ?/ĭ{|VZeGq/jen,93JOC]̟7 $z),\X|e )u.sT;?*,;^DzcmdryӹvY32vqWm8$'qPq`IKP?{ƍ /{rvG*=lYJ*uTl^ba E)"e;I忟Ɛ7C IbY"9h|h c^!HBq}YJB!K }`0;W~xù38 ?!EFx!,Wj,; O4*MrTg]Jr 4izPTle925[=-,y+Nd1k#?o,5OUּ3]x;}8dA^ݍx%~p)֠Bg?xpyaVmA2oa6{ogZVGZ3YxMd=^/YJӾ#3cۉI'm nFQ{zsz5qzk2VL.턻qo|JE`0tڔd;{r"J|0GZ\ٙ4>*Ke EƙČKkZ%295cs= C`,,J)SڛޟWs+9& ?ON7O}/ tj_. 5|aU ( ǕiBYΊ#HA~ yWb:]=<ñ[ t{75I!az ߦ嘽_ ]ˇZ !H=AA5H]$HQ;Aqr-{e QMZN <_YxUhc6*Z>RJwWsB#L2K* /OAt Bk2]jkA92ϥNynr*%Vqenp`k 䬠Sr&b8P'+xR.-SKU_~ᄍ pTI1x8@ +7  V0,UzLR3S~ח8/!տ4SNx"͑ɭ`d>i.fZ(0{ NX@a݅{Lu~w1cZkb1knxy%s0Fr.M<T?'=`y6֬dIRhb%H ty0 X-\YXQ<߶a$KA E:lq?$^($.(Rm Ǒslak&Iz4ݫɻEbz݂) X)Pۮ+@.bvEg7 P00͒0zϮ"Z1Q{|}2(N^ T")v2(TTTygV͍㓳Qop!gLT k} Nm3pP-æ3+BB026&OSgڝLwqy:ͱL32glz'7˫oV!+P^jjj]9K<.T$Wm/͜WaWLvCPpog:߻nepzO,lG7G_ޟ1rCVE3Äݝy2=Wi7",CL eE0,sɜ---LzٵVX9_3hLa+ Jk-Iv۟xZP׶vKhvkBB>sM)vnq9Ge(_0H ]pGdpw>J%RcRݱi1$jW;"{ K@s1!W"D,)Y8uNJgrc0Hy !:߮l~ kIAVXEu -o|,Vs|T b1V{4(uZW(DVMHg.I2`YnɃb#:cn4BG'ݚ\Dw)ArU!)UmE"dgH*6uX 󟗗h|/&*2g.| 2uZSJ#{8c=lG_SpEoH%v+[l]mqԉrsFY~|L&O{|N0K-!v#rҭVԷ/y WھojJ C4CBճe@UnPpd&fGժnV#|p٫W'#o A#Tٻ-C7{٭yg,4cDuǚH%i$ZV: ( _LiXP :'\mV[6Ges{pywlZ~L>F(^m`Dkxn8f4}4sg`zWOUhC`dBx 1;>Q0I=L8E*(FZ B:V9}ni )?Z) 2='hDc0%R\ЛH \jp-h^xZ6!#ѓgH !YCkl譿eguI؁vzdև|zX٨kP'&m5BJX7sX;v I=n`4f6R/>QFIFe( #&Bד1>3ƅ:3&`:f( V(0Sxc:5bYhZs8Cǐv8c0H1DV.@>kƌYnsXi1l(fxLMu>Ln>\6OcQF! Z v|p3}'ѕd4m\'##:dW8$}Ny@eng{<4)Z-V: DZQMQi7eԂ@#uL%4^6XLaTR`dۇP6ҽ;ٌzD#IԼ|MVD0(lҕ(lȎPg(tmdR>oWJ-Hpk4E$_P;Mjca,)LI #0ya'{lb`1ì5bb QD#UśͮqS][0I%208JﳢZYYI [) b|Om%8N _+(mSffDOvn".F3d'ՐSx?2և#dG%ªm'p3 tS>yR۸ eڐpBච%j p2g:6*Ni;<xK%FmsO<]e~E5D0ڸٍ7p8eAhF+m;|LvE چI"Sի҂ECXUϧW LRE)=>րG2O Xnl[2 /|U6!nX-1ՃGQ0Ѱ%1J *[1ޗ Na½fjo5E^oJx9AMFCr?5J??U B T BݠZA7,R\ TxK:T 9r2 24~ZJ!ܦSϻR{W^ւǽԁ{@bKCM |%f !~GPd1wf&ҭLJsJnck큁 iË-$.76EQZmjDɃ p~n*MRU*-tw@7gRіxҷ [?>'O9"y2fI5)BG09YK1ª kk"j 9b{9LQ!` NƬ\ZTPG SJAKz}qx1DIF m(:/sI ^9ZA+QܔcQM'`|lcexlF5c v.6bLkeJYx P7r"n1-bp`n sE &єs ;`S "Ϲq|g݌/\>?c`̍Tڷ? 
2w=횽% Za)W~ o`m>ADdd/Ž#o|&9#{ë8+z_όd\i~n1?8 ş?ݻ;t?].ͳoCC£ vGӬ`r=+dr$J +ShRՎ#*Nشbm a!6˨*wd |K[+e`FIYexwd%3G;b6ȁ{/bS\tؾvl7̓=kKšq \w0KzܬobiImfO kNx7cMNOzC'.+bmA^쟓\6[7}7I󼊸„iH34R犂 ;NaM{"Q^ #8ѝӈB pX \LKo#OZ0%#윶!Å"\ Mr"0H6;D[%rB Г "Vml/5[W[Kʤ\-Fm;ڂ _+U|+->THHU)-hA~6m뤆!Bz -3>0'(ck5kvdJG 2C5#GYw 2,ظɬSNZCtj㚇e(ML7uðf`?k]XY#verwUsA (^h+XO҅[{[e,:u$ѫI>2ȊOR]..Pm5ﮎOl 5itY4!cHv= Q*okmH /mس`o.6_.O E)$%9C#rh 9CZ(,{U]622[[w F:SZ98c(-Q| yf׍~*\G,]{;ZNzt&v_pQ{+5Ζw ; }~1S:g>}NK&~!콬0>؎Uy[ȟ_Q*:;!KDǑp߭#Dr-9s}; 8UiTުxꍯgj+A֓Ledfol~s/KUw+OkCƕ]߭e[8 {gLd *!nXb̭R qt}~;JG/>aZ& }|B'һKxvQ6wGOQ簸=\ нSt@ R+.F1w(wj?iHvDeXqeI2B]9 JeBZyyOZs%؎ %gA|TrUxN65y |qC_<;;[Vt}Sى:BױpCfB#52eQ1%a/:;s"!|Hp%խR$Pr'5j=WJt@I2$ P~V,ɓ+ZDY|l:7 %Ƽ集dUIC}T<7 J`l6М 1M^Vʼn8@G}ś)KhUDMS!N)IuL$@ 3($f"tLIr)BHRסooVr˞91{!y6o8tb'Ӌ Zv5)Zy%Ir@W}ˊP("J j;d?ּ^.}٫~=ˋ J€ @D |Rg/#|K/~\߾ qn{G%ճp%~3N/{HSl\i). 42,4Q 酰& ر( |JSaoF Q}v犕 sJXIܖX8YW. KኩVdgջbaeAn:L2_H]99=3AgJ0I18aw>ǺA@AzJC&0(WYei^-:R@7V-(uI84shdH_h<%6{ۜ aBZp4 ̙B'51 dģ'ąc 0 ICD4{Ԃkbk(Uh.b&#!ީ$|!IA#| ȸp9Cf7 5+]z+bCh-bseJӲO_l|L#cT u.b"Sus|Ga%PD l.o6HNhpʌP,oKxkOP{z[}sicT%m#M5Ұ=0\d]w,qd-aiz˞G3;MFVegy8?//.ZXN HS ppi6hDž8VP+~dQ-l*O.'. +hsWzZD`X+@ -U!GɁ?%i q0[dWGpIIFjYV@ *0$Z}ܤ{Y6 ד,P;Ԕw0#IU.@v[8mi_rIX'W_C_Qin7>Y%t+f?)K.۶wٶ˶]ն-F@uA0)ڮRoѣgn֍bMq;q*^]'UeE7/V?RDZ{̲ wBH6HcAx} ^3y)kn) 9zqYN&N85HwF`mfR $g D[lѕ_i&v"7V=D"*yDѢmz'1ɨlA\3\#)>wFOD ĵBOJ*RiyR['J +5Qs1X4SIZv)cJ3 eȎȿ?Ov+gnr$02]U1D"z 2+ /g.hMA$ .y(u4ƣDDK*SUNT]DDNl\#W4$W \ʥqQBT҈zvƽ ]\NC$[QJڪeklIKASB+phnZB1fPG2'pn1Kn nJ뇟RaQwn(ۀP QWw=, x`/ ';W;; y捒w/J$49J4:T&4^$HVi\>7g{<.$D7Xb\ٸFQzRЀ̉0mSX0-NM3gv@j~Jʖ t:ƜB]r*iZE˼{l>Y f֡b@,?}`ٔV6NrJS`-T2i6SY=C t`d\MދgJ\+s%+@n*R z!icen }.}副GvUc" ; ȚA4벣ʎp#{j!4(xꞛy8ގn͉Rq;={Xԫ"@ !u,10;3Aƍav<?a:mnbymq}}Jq2_Lauo`|,sޘS5x1Ɨpn$W7>Ɯ;MK?c+@h8iK2,%[xU羜{)ز8?hțn:ņT(Y!jEt 1SБE+UN2@Y eFd=PB#DXmŭP8[w-3j̉ 8Fd '""$AV 'oZRBL2:N0фq5Gg-v98BEV[2HiXD߷' U[+`_jHGւTﻱMme2+ǩV1,x + mrЛBŇŚz]1hɯ$!B63W(`ΐ@R?-irELC;)#+XJ;LɌw`g"k. S @Ҳmsg;l M`X)mWZJSCWg`RV 8n'"CqNo,F!"~B|TDI&ɲ) ,BsJ-t_J )0U5Ìu&%I8`^f HP Cܧ|<*K զk_X~iDr~Q0YC$> !99O(b<[& YsaheT7$9O839OjYGTT{nFVOgPďO`e"X(=iތGݘo-U rycm8"r%ܘ&8Nl `+Ahyg20%̦!lΐE, ¥$%{Vl'%"gJ!Ƴ ó{? ABoҬ EF% K*QKg _@u{J1Z7{6|ZX~pY($q|Wr'Eߟqc>Nc;0D&|<%a*?-Qf: cxz`Ħ~YZe?gCѕx˭f5?؟Xof^~f<}0<+ OUgƒ_NSW3'>ӓ_fy [`x׳z5˗Wo?y˧>yVUqGTXT:T z֞GzfcDٔDNk7E o{ _yozG~~/~yzQ7x17O^-_BozǏv7_P^?n|}~6o{6) EC3b|?)oBŐ,.蝆h?޸ӼK3?|N_ߤyE\9O>N}hlޭZ.`Ka:(4jY^zQT]Rp-MR£&օ.ӌ.xK7۠f0̠Xݾ Ӑ"Lބ~B'P~8 >gwi䝺_`B?90;u 3g hB,>̍}l])~sFyK?A1 A\緿l ꟏T7s<c31`8@+S&zѳgC_^Nat@ɳI#x<c Qxc0 >殘& V@WhN78ɿìLD|Q41<^g_N_ρ% ~?L{=Sq f-,+: mC㹃YYBf,k(Y^ܘ-Ya\= -vg"NO߮iHpB 6T&F"0#HͯDkb R ftaL0%IQ?a 47) {3Xion0W/Nrpqo'\l~wqh`tq#6ޮ]cZ`SD)mRv3SCowN/ M_L -NzQa3`OwܝMégmkP.֠\*^O%9KV ?z)E\o{VK@ 57>N+VNv)穵HU۳h-^o.09QK f;dTfa] 3 9wƒXe]z[010ܦgm v'ɸ+T(;n;Gs($(;nGs(9ʙ`wzbݳNyN+^g RZ(WB`0QB 631nQ.4 ,»EĪM 9K~]}s 7rQ&qk;(1jHk>hʓ[#:c5cuƼ^:xMUHpYVbۙJVAPeDU1\VlG(4M,^#G;8IP 'xȣ6jvqĻ]=n/nn#p3z9 nzP*$= $ >H:KlIj?p:UlRlԫMp8CR*.b=QC),_,<9ɲ8'0_־s;z>c/GxNm)9J#1re'OVGg/8U)Shtp;(:Ѣt˅d<GicMh;CNCpt-h=ܗ*EΧdtQ(ɽq_@X<7:u*imMp8hU~Þ5ZDFdVЪqѩCS7VaڅyyUpx}MӺ|G*,!6&N.-MˆO@5,NXDrtݮ[ې84\3Y^[FCV_tEX >#X OgY02Jx*{ YzъRB!e4;1 "tY(8d~݊sע_%7Ȣ 92Hec͕f 0d45 2qfR#ҹEȀѷ64z ,cv+ڳM1ۻZU㥸ݵ>6պGx :;0[ΓTb$2* i/,\РRNZϸĭJ|霐sU%?٦iq;a lJ1&OavwDbTf00Iw\zdsbf ,pZS z-*a^^P\lЌL#.QND[q˨`sdk58ӛP,kIf A\1hBQXѦk٪h]!E+`*9`C7vL.} JL \Y Iܵ#ɌX{`D+d l!2IᴖdsE]w:Q ƴn ߙx](lK2Xo9/)wV+*S胥 3a4TPx 6e֊`]a}Tpik(S[WM+s/5*;Lu* Dy,=gW 0a V8&IC޻4D)elc]|lt.xz}O-E:d: Fion0 B $7~`X +.Nr8}y2wKMf? 
Wҽ |L&<;z'ܘOq!; SXneO nx%R\c g F( M$8瘓K "rޡw^\v7Y Jh76nnw1 e*Pbr&/b4 k6e cW$//^?|TGG_j7f\t^P l$hMJ+a+ٍ XZkxe7Ts(X[.^}S?bC6@W\%Y"]*Y?+AgLn~ֽ$D%nD+){ up_`Ms{{ в7u&LdZChtĥHڇ:K^wԕ5)ެ:4ү$R9hRۗI"MIA;J9C7?a 4%doG_lPְ@F5 tE+$ȜmR8WG5  p)#Gќ4I t,+yۺMu?7 'f*m:R.1Htwm>(M6o~"2ER%- ^iQ;"lMѠzTT>jTO2 AV͌ˢPDcd,qBYT[шị QGYQ$=* Rqz LP\ZuzRrET'K"T;p\zT_[Gq.3Y$|-Vki."-q͇X-q92EZb8As#ḭDy"k?y{>\gQ3*ڞw7! sЭ I`ƛfHR)ƾy1rl$R~r7:m+pCfi\)e` "S. ^{;ͧ frŽﳑecp%AJ%Ue/g6JS;g-}ko ]vtwUw{nPj ׃Y8*ϳ:Aw>ӝ&:/ kP6`p3!"?? 0CX#_G_.Tehw\mU6g\*=RdNLB`xfiR0aLڹӊ&NK0 0s[bbOyޚÔw5YFʵovj<~?9E#F'vbFh0%xkSG-TKpwV1'3_n]2+0A+̗Mk"5?J)2='BKSgQ:֯G0FQ7SʽYơs!C[bkR׀P"C/( . -\rN2wRyʭ.r Q /(]?8dd~4OȟrZs9{7H*o/yNhRpHh%|1|Eȳ_xhY L,*gb0Ofl~~CYk3c0s; Lp-BÀӳg3W/c_K9"`UUn࡟Rrr laeZ?jˍ^]߿ lu4dg!g3-$ggټp+ m| aoz8? `i葅^O_KF؋*\6D{F0)V]ɞF-ѽ+52<`4B؈(;$O WM-Dn#k;x*2,y 9hdE/c@ME;k""a`cxID^?QS~0:쇭~:쇭~uzZ=9Eh\=t2Ga9í:ʝ43 %]?Hm|L1cHA/ïr'\[w˹M`.x_$e G?xBAr:ue;BP5ȭzyǜ9ן"Rt[ЂGw*#ִfi졨b=$yg[:@3Rz3\Ôx 5o`%;"@-0vf VNJKq[YXb(+4t"cH3| Sh?1kc6jL#k5>zD[Gqr>gOO$U77|*C 7B%LpN26;nnBjH @co3\qT0/X45Ya1RQR. BSW&x ,/̀ -\FS V]:H+jg!mp]hhJ\p5 } ep.n}_$& D:rLەs~"AF<׭1mg:!L./}bZ9ɵZy> <\ꯏ~ưoH4ka%%>\Wk&~[s/FLȉDzӁsHHV]lJ5n0 ոy+DضH aǫₓwZԵ2:`b5sWd&c,g˼C)2Xن6+Wf^̛%JJKN Qq3̌ iTZWi:OeZdlUqZp||ʒ^C+6pb!(}TyLDZ'F,"tpRsP lN6÷Z666Z i$RqFYKש֜FQM9fBmh㛜U3g缳(mYgԓCܧO )Š8&?Z7WbͦN"-e9U{r;/i&>Q2-1#@}:g(uhIUe Z yws TyE\dl 3a滲4цQ 𚊶㏝D2,[fsMkV.ed/jńUn3n\U.`YnDPu"!4gژ A.heHj|[q'Isg9BS=N5z|q$Wuƈ`9.Uj-.(!Ƿ/O?(<7| 6h?N+d+mt?Bm\ҭtkIe7[peWz0E b&nQ#Ǒ¨f-D4XX83kcuDmC2b}rq# 7B AKMd_;Ր8[Z%sC ѪCk%yjKn\LiEjB`8S R#GY6#6HPG0J;nDyH9f&FLrA+Z ȹLC vYNK-8p3 p&$Ù~LB vҜQ 7Zϱ!sgcNJAwBX].p<bB!"'! 'D!1L/뭐#[=p~xOUo[0LaZeZrvJeHJމ\態[ %WƇs?P Ъx!=w)~)y3=gJDe\` Θ3ol=SJҩ<)U6.O!јRBkŹ^iYꅔ0)AC#N+< )#rlS@.KBN Cn! \̎gTT*"qfAQbjS"rìSB-SxXt"p*p` # Hh'abׅ4Z%qn9IjpWLΙpJp<˽%F LE(r+/'ZHK@Q&A_'] E5<JF4i(NGs", Y@Kϝ+#`pHr(&KŗVfZ.W$WXCe[pM²{\Y)0~e4m? MdNMm 9J@9gP=3=!dIRǤ,ufp!xaxE( `a[DhVnVdP),jX*[Z GStcat9]jwX 8'|`؄*GtPI+@v#f sLvG3;< 1dv}6bT q} H9ȡXQ.w"IF^@Ata7<9\y`6y x+{֘< $2i(| $%$Ck)'mrI~j7^-B F͌\_gZQ{;?IjwK^L'SY bog:yugt!Ωl7~d S<A^7oXCϔ11u~tқLìRbNWqjuH~E]nB5;`'+4*ج Em!TКSXƗU`}vdW%PΟhd}' T*bWfvP6MjD+TҰ64VlRA6AS;L'M.8UɖjO+BHH<(bA*ʹO F+`̄dG1 \1('%2 )4d:EOMJoB!b?r3L:޵ьBKu1Ԛ^U/UҥPcdw1ԽDr /bLVVb!-ul=`<;gv0 VKD)McoJ؀`#{yo1ƄGܞWV$b;ŭ`=@Ofx{ @w)" x#Xe[ Ir!ʶ $t9 m *ȶ&CV'RRǕWAvs j9:d>Z: r.Yh3Y(݇z^R1 {H9"݅;wΧv0w#: `Xs4ݹFu %6$t֚3nԏo81Q@4]̠W Ůr=1Z= TU}v[9S99^`vP%"` WFGFq"_;{Nבc`J</ibNS 0?D #s"/-݋tV-njM3h~[:vy٥d]ɺ0ua$,Yuh{mSXL<|ʽ~c=&7@pj?nZ 5q6,s"k@ ` 6H&IuQ 8 q4w6F.  c|,b󓣟|6 0}J.7_F2>"˶3Kf[rL:I "s$K+bd֜gEYWN(HvknG+j`GF*p3\Ѱ;h `t1N95,LZ|ubo]xK,*ٔW#:\hq=~߳ns{isnJvUwY_hV&9f^"[y3XgfjJJKgA[TXo;gv0T($6,w *D'vj*e&N69xGY/iϓ-Ix bb:ql |'WytZKlqQjCoJ{#90%1Wz.+^.*|/ՅX MHa oaii25̣rETU,U"J=Xc#EmE[ڮ5 ^Qmah][.Y]]vQUL{ ЭmKͼ2\FpI:)Uꨍۡ8HpVe)U//):WYF폄DBţQXsMT8Etܛfel}48Z:pj' ϳ/!سxV(A2,k恽b*ǁ 4Afdu]Tʽܡlk7эK#ӌ*ޱxMe] 2Cʶ_nG񈻔6MOH϶o 7 !X5CJ9eԒj4@Lb4z5W17q0rL,+.ypF IMP|be f5? QqGO,Lܦa:t<8z8&]m*U42w)$ߝmu"\o9Ǻ8c"~r \6xf ra-UE㌺ÆP>~Z9\ˏϒOO' i2Mox4 y'i2;>7s|:߼[W^~v񟳳wo߽yv~ B_ '8{m~} sӟ(n~~vų׿?gsFN:\~z,bX5oq30L뜑.::ͼ 0'ѝ`4q*GU~Mõ/|zn{byR{cT#j&Z6NNaB5J=3NK9=_airń7]z&Eo,lzjY?\ pa.0 DbHH.  #S J,}CI|M".]{|3̷0k/fVz&-HtvM<_R^h2JU#1z -2QR)ĚT\Qe<01.B#+ KダP}!qNJ}|ߗinj4{z-Pvcn(\5'"Q hd'Ѿo^>Ͷ$g; '|\B=o*˵ZET `Gcu#F%.A>;^Hu8>MiD5%T"EpffG7P0WpL]eQcH:|7}2Q`>B_mQmG@=JE! HZ'!؜ ?(\J) ) RJ Nĭ(N7*VY EY4z3nޘ~\WN*0,8]cD'\c6$}^V(4g/]NB2F}! 
l{sk0sjF|u9픣j$\> w]nv##xK6?VK*>GS=Or`mOD|togR|[9 mW( Ql>dpw̾ O=D,UAk%B_:Q$؜KTSBU7G0vxZ.wtʪקu8u¯7$C3e~6>K%n]ץ%%#krߜrAȈ|J {L!IǶh>-RzuY{ň [1쯻W b`<)V VmW 9M*p-C;gbNfYnȞݎyݾcn] j/ѻ1C╨NsおN]'l&fȑs;Lb֛&Q.^*zG!(NFC H9}sdU^%_mR6ҁŻG^ !0z{#$ =_Q:%([OYliP걡cã@ 6 9(KCm*ީaYndRCNW;ݤO mm_I:sx3fRւ[N|;mc 5-T*0=4e )uXIU.Şj('XoZv]ʚӛJǻkQ6-d.w c:H߉ 2q{їq m.V]pmz׷ipy0Y%M5r6)7K~؀6vײ|L#w36Yr3;,uI:t5)UM2jSAP;lӆbUY@֪X Q؃bF;?LGri޷WB/lum"r^LB+x"-$QYJ[ˇg *SA1IP73]zr{up~ŗ$X [bagd#^D AFQpڽ'ȳg$pVv2'$Z3Pwx]PLC6ZnkȬ8RKb-FMu`5D6 \k\.{XkɩnSκr"ѦtMökv4m83sC;9x$pN{9Dr葙߿NBdi+3#5yY?B!I3Ev"+ ?ԲWأH#<e!@ӽ^'ʖU?O3cs42Bla,u=}-PO3?ۘ+3D;ו;e~Ȃ(ҴXc]ꌿm'j h#TR> MjL9 O7(kQmj'ڗ#]}vo)% %N S!S&;_.[xNuovGF4C(BՌ"ָ7\$=r q*Sa$k%_Kn][2q};_$y8s=x,E!,OXg~TV`%DtJ` Г$9 sc(twL`әQ µYD" Hr[jڈm$}yl-ou)dB7<ْ2xpRi?: ,(V!@?g] I48_N͡fC:Syuۛ)[RBkrI)Z1''5ڭ9ldgNc(WHGxmtveshj48\$B}kDhwYn3_.6NwʶzrK42K@SQxskCR򌵾s4>!;5@oQu{ŊّQs{ÉZhf bBElQNݯWKgzc[ <$PM S<ډ7i.xc@hnFi/EM̭":yN`C& BVҍba/ ?R~°{;(V-@W̍2GeEzb$n͠_]Μ՜բ5z⣣sԲ$¾D{x(0Rhѫ)(MG+PMt~8n~]m~js+LbÓ' H8Db`Yo DİϚ}\ # I]ſ6qU (¸0ҕ$ 1UK3R;@G|6M~ڊi/ygva^TWSO뫏lP %< B .)xSURgdH =b %LS&:="UtPN9i WE]@sfK 4oɰs>Ă&]66ZH;6| B$Yb|xxP’#٢DSO0jGY]3^[HJjxQU'l>7B(;]~Zlʉ:V!h:b6ߌ?U= C|HH,4߉jͨk} b-PkO6ཧF ѵ)n\[*[7__;)yF߉bGmoS1#ƿzRGŸj+dH܂lEU/P]+NJUb;3ѕqQrQf_6os_62DהEljcCs Xa#bMo!yvIWK B ;qZVOWm73xI6xץh=9oQZ㼳UA 1ǩu@Hg4S VM3ըbKY&fTgQ wŎ yE%V4n{_6%߄{[]DX S.>%&coƘh siQc"/|NnuDԒ|gͨɐi6Qj+E݋)G%㰈_1ѠʙH(XI2qi0zX+$='fĝ ֐mǙKlu~Mk1gz7e6^%ə_6{bejKD3z?uɣ|۰2뷮`CB|E ͑~4>e:_-Ec*܌p,~l?/F!ELb6x#r<1dBXz ygߎnrЁ=FW<~q/|7oV_{+qoMfߞ5qn*dJ*xq ˏx7# =|G?.wqydZ(ܗXQH.~r-7Vf_y ARcT{*JʌX#WK2Z?4|2v~cbs,ZJfdP>.e< 2n6Mq妄Ro3$@g,0 0u"#)T9' ͘%G8>dI.G8]{_> ng׾h~~Ho xs/"nlZ> jB%p_x[l3_.6py$}}N$U"Qʏ Xl4%"DNV&~̀pB" IOJJxoZ=Q !VnL!e=/EHLNY[I|:g DW mE1Eob:WүR($p9]jd~.6.dZ$~Â&W_Z>$=3jOa5Ok:Ecl;}pqezw K(;y~s#.wk=R.˔L9* $uRe#D{-xF-}GYE.`]ε[Rqn la<ϯ ߽Lq8# B99@1|HM88E/Ha&RdRE>~/[Rt;;Ϟ*Pdݸ=A=R'= [CXe\HFP:JRo2g|D&L҅/Ҹ٣$WV5~ݣ]5ߵD?G߲8TkA7g߭_(aۙ_n=O,˹on\拇d2^WioDg&|u?swɖ3@NE)Ww}۳BJ(Uq1$ /i\x?eƦD'%1j eډ2[pMj<7j.z͝0"H:?j#W|7%C&VJ5LQxsk9rfKoq,-rrAfAQ e"gxW;U^2U|)DTPTn* ˪(CѲHVTmp1(N?3U2RqR'HxsR>W[/GK Dksf/ӛƸfc>\'ǽQ-\^%:!65wm5hk o}yu3: 6A^Bi*vVPTC7?tti pP*4ą6D7m9x3qr5>g*Fno:waTu}D(pY[?xO4OqOO}RvtֶnoDɛǑ7?1y󓐇vKI{xd=<$ J&/>u_-+};]Vܵ|?1]\&8uU$ 0޵m,"9o{Li:n4iP42d"K(97.dɤDrI(rI>3|vwvV8捥ZDְ0Tۡʃw]gCRZ dHXy1BvVMޒzLlbBh9BhBEjMUZ\#_9ի5q]4˝nۺ޿JsY+UG+=?="[bTYBX\^h1RJl]mWo&$12v5}5_}bʝjOS"+n]uwȰ&d~x*DLdtkޠqL0::hݸV'ZW{!OT^ل{mqP),Y)b+DP徦[ϩe3/ 6w|uZAXE+P"PW!֊jD 9XȐC^C)RB0IQ)..o]sWWqB-)tM3D7⮽_+ڴvTN"w߁bTeΊd\ia}|př% xOuxm?oeM]U/n3~o`Jm:j]]ƱYvm=յ|]xd&Gro$M?M)PvxrPX:9hZCV䆐pǪ6ܟx Ľr  -0z#| aN$*ݷĒy! SO>2#}>|0kEy<,j#^^(?rx ^+E  '8TcRC qUx"% ȱȱ&/\>zq<%e*4Ks)OAׅ&]JLn7P̢~f>m{ǁ&'`k.sx)|]xfd2܅`kGkZa`6K;5@ڌـ6fHA@@Z`q=zMZ#FwKUZ`S2a.gv@œdX GtѰ?AvCIl&&)i.y n@7c[wft*кv*7P2{ݥ]$ R#od6T ڍ1]L:1fSȞ }0PL,fZ[IZq;C=3`oXjF^x& \P: o [ > 00j!.| F`t% p8Q,g'a;)kt)wk<<.IujuyDx8ϠtAt)1[2A=t0a2w;לGS3I9!Vu }Jp^(>1Ð4 stzF`27UI+Fqŗ6:ʊcɈ9wNcZ}˺ߌ:GEj lg ~69`! 
Lqb#ɘ$>{ocR t: iGo\/CDžL%5dn pљs>qd&"uFi13M`|ڮ'k-hnscN7_Yr}8||=7aZ{?8 ~Kvrϯ~;0rE02BAąya$I(FEW"/>T!È'*Ћ+kwS2>ճӋ__f"vz|Yg_/svryNj7;<|{~?{js[Oyu+߯.;}5A~8{9ޖ=ƆOO~8i``IǍ)OΧ'~dK *-#Jo(Nb6I{a^¸̠w?i%WOh 03D_tf2`Ή}xu,iͯ8@m }4A˸;zַ$YF- ^ST_Y *q2}i⻝ aQMB NO./I^߻C0{{/ g s|M7miooUh8w,Sl ?Va_#Q?o"I}'>mw\.x*#"4eϯ $ap}C(he$7ȓ ~JVnTp@xCF6 +$>_nYBEll(?'9l$~~ _tA#&TΥy_|p(.Drb^6l.3߿_?t~!,D   8sJ@ρs8 Rzg#D!{8 ,_H+$#mKy"aX!!/yGv}yJ06s!0DCGSM IaAy"W ^6I>`~LW._}a1wa{۽o[& <3'oH)?|[朻o?56Ycuu dbtOaX R6X3*Fu]lu~rY R|E!cw!Z{(D?4n!ܼ4YK=-5`(oLawuxA_ bj6^tr,x}2wt3{8Hg!h8]Fzp|y:wTI1Ҷۜfm]isaߠe)RMo]]?Ȓb,yFM\qװPv8^8LQc('X($]# rUT>G~bD(ӳQg Y`krwcRWyg3έS![JL["歼积V̂hr*@dkv'x~7G鱵qF}2k_mBߗspE\D|0G|~cy}.x0B\fBcp[]xQb}|^tܫ.szhYcUoni].wL&2k*<6ZLO g~vDˏ-mCKseݙ$qo޶u;B>oyo?-( a zށ溕FS< gYR?au wßʥp̡{&\ ΧOݮw&4^oIp˹a7pqC ih(ΙbY:[N…\ htfRfȒ\bЇFռ8s܎F >hJ)UMP "\e|%c2N]3a I"B& 9C1UЎTSw_g }jl1${n-mC% ʋt龭JAa௎{ +nC/r'mag?oض%aPw/7ˆ)%up1yJ8kZjJWɔ` Je1$z)a-"=;XX$yzli[.Ki;JI= ҿi;.D~O OSD~.D_J>U6[E(Sp3R ޼wid$>7 ׼"X.D N.21x]1\tz[v^)m|7,3lTذ6E@9؜57ι]"c~~$>ϼ^ xF!w;/iu׻X}[#x2jzQD=$BC1@U24UE~j|(4<`ԺƣM;f>\+ɫq 2IFμ0jOC'FVf0*&(ޜ绕ԐjWgEcgEt;0mW [N՗#C;`VȎM=yqE\ɚ-fU6h4Ego`x['~ўFF5n/I4p[%3J)"Vcv{8 xko{d X[.f5 }(?(r~b0(&D2sAIe sk7Ii#@vQ(w ϓERhro%_QN"ܗ;E-VsxuW?^/6^?>NmVs_]&e_2͈fjiFq 5E VLF,BdBfPg%2$@,$<կ=2m{9(282 gރNCFV֡ )s=cPԧԝ:8󘇊bwLJi4E!FHr4Zc Utna"C-*;tc3}0Fy=2Nd6$va1[OB~{p1oݴ99|vB O=y;y$~iOu5tQN%|u}]bX~!_/Eͮ|,٭gݲ񽹟|5օҮoAԥh&JH@G,"IICN0 m\ ȌdΝ>-k>}0v>n ]|=߇/YIEy ?Gש y/#F2%4ҁ^pIOfY`JrtQMn.2{E~!& ic!W^lر\Zl'ó^j&_! تWiL.Ъ926=B-C̩pH^c3{vB,>z30Iryb@N/P8u-RHng txRFsh0>;MlGvN1^;pV(\CTz`>ḨٟMc6!,O9GQ9W,(e$f\Cՙ%c\Eh oR Q!)9.pwׯJ5hi(r=ǵeS8;J 7l79Xv~vUiϑ= BhYM$dൾ4b2\?ܔLSJ< ;r ~2k(IH:G=ņ@ Ε}X:E&f'vS@%02[L8Hq$ Q!sJ \iiQ7;RC2Tzϋf8f{ nRGr:M52}]s7)}ɼ[||/DAc* %vMe6=ӳd7;O'S 52,?(DLq.φ=hgʠA1Rb1[\1vuƟoL~sb-/*ɱΐpa rr;JB&֜qE7%l6BS&aGlb"e)H*ub|Q8|Kٓp88`" ܉ \!Ql4TD44 r'BDx" e0 BơdpFO7#4xfSQwʗU_Z`6kiz/x뻋4BT,F 49ȋѬ{!Y.A.U } d,-cy5nvuU*59AߠLyr3|'OmLާ2/Jxn_0HLdp>fxl;d+wwݩ!~q (v/o8Pd 'vk{{myi=cv `XXVb.v̔6Jx wUId½% $tմrMc(싿ͻ)va{̎T3_)Rq;"''P2)v2DVñ}[C^f-AeƮ4eV]̧]Z$q0N4WʽDa~?{aqc.pXL/__?\i/rQoI*بv:Mf`7~7 )wr>|^W )ȞR4cbA`a9_K)=cY{dw^ F4}j߆jND%h˙K:N^Gkm,-0N{X*tz8ixnMSӺkTV ?/* R,edLl}O! (`_q2TDbkq}T˟ EhtgfRIgv0M [;W7Mww:SX*G`GS:-gdf9o_Ww-K+j8߇m*A \7R«O@UjBΣR@l[#8LЙsFy%!c RSW ^aj,@{6m{==a.U9U\Kxsm2-lucikN@I?~7/"ePȖ˸&rk]Ԓl]PsK+xP=MjDC#% eUmӺo[[T٩>qmi P1:ئ$6-Տ̴%)4_ y' Zg4 5j?$& 6OU4ll|~4 ~c(!vxr FCֳHaIp"B88U$%<#\h¦-ϔL{V{?(I _j ui-dn. 'o;[ɕ{Ws2symIHP%dϟc{FD10K v/p}fIFک mBb!Ak-A܄@!C͡E8J&3;Q& dD70@A84EB Z0;9DKD hQwFcO2%JR䞄"WӇl*%`calm ät_sYIVs{o7yN vyc|x7vSbcV7lT[.I[UUH9dDžd#͔eVfԑ>D _U%Y!^HNQv ) bؼ&xᨢ2bo{OFa=&tkHRDd{8y qz9b'8A. R!ḍ63INmM\$ں#K(;qozQoP$e隓DHqhS<-_ cdA *P0RX 6n\m|ٞ:s(ʟF̀"]4"gO]U23^ O!)Iɶl'ąnۓўT6|'ĉ:^l/ԋͩOK-O(6Zʯz^ D@^[o'L}.>"4faß^Ьbl^o,1hסEu}qiG|4-}of[SFl:1)'TUޛI.W fAg-2*륙.yϜ_78yPR\'vVr u2A%1Q5ɀBMTDҥ@I"Y 6j($VάJ @.`8D RG(-A%@)\MލPȿX:'ծSb?f.jOԛiVQK>g%":JCϐGal"ŜGQ22z@fbwG:Wc'̵4ѱ!!bi 2ƑQah XZeIH5(MxRk!T.FV\ "prSN}@N y9;q# -/4Z",Y1k=iKƊsx;=!AծZ\)3^$"oZ> fۗ+D,:ȫn#;+ +߽. e/F/y{9F&D$OB\(Q(w.[P&\0✗S`@eI%%&fŮsNB&'H Ye2i4(cW8E!hclCxA+IiNbN h%2)LE WE|W'rHPja8ӱJoLE㌩+r[Jkx"JMafva'{ʉ1!z;K瓣wR@e@@3.{0ϡZX s˰OOEyݾ!TfOI >yC NǚHQ"-eH ^-Ĺ5>e_v0z+Pr5* J')If6^7oJI- 鬊yH # &@d~pW)CT1Z7CU')[ \/6:gT˨cso>uo`?. g<i͖ 0. FFΪ+JWޥZkOCe,_Zhiڹ$6P03'&ՇQBXu[U!5o/o|9':\mA1 +?brU(\jtYFHqB,{TbGbU0 %K8 BMd829?dfՔ۷)cY-x鎑}?gC'MLTlċdAE6"~rܐrSlۋE?Ӥ@C)/ָ!Zqxv/ӟ`׃dI!(51u 0A!Rƅ8'~^*IF~ 9>. 
cB ֡)a])H&pX$qB b8 tsmd !u4`؍6&Jk%a|H6+Xin3vv'67#TN|oIMbwlSPIraLPH; ܉缕 ۙn|BxS;ľuї]/J:jbн-,u Ynxm4?b\< I*k_ \6zzuۛiEmvיn(t[x+}tZc설 Á'vĶublSMzHGv쵷avQ{vx lNl7-2EbQd\41 <P MVqj .seì?f $вz?0n{uK/Ҩ:mǷ?}|7_^ p e&mEF wB{Fð?~_>Ͽß{( p 駏]gMwh;"{xnعu^.Zs7L;vVeA_iwQم EN0i;.H%IRvJp_}5JTp+^Jݭl/k 2x kڍo\ #vX^ex{ ĿnM?ddӇ+J~W#ӃGr=S[K0C@ ;D2ҝe%뉸:N}cåԈS$ϝ߿/L wn6w6>׻_@ ]|XYB;>̃{_007v1혇!=t͹3ʜ¡X:Q UW1 _@wiݻ֯{ƿH۹]wf\iJ?C7av.wGx~z x;5Vٻ~ЀL֫owzq[* q% W0Dwg߃7M>#Xc{}F}hZkS*8wޕ6$be,#z; [ L&]5B$eH))la3U6 ~y^T B;¤ϫX&uyR]9spGqE)K)L %N$(Kl^X!8 JA)\ׇIlm n?ƴUH#6O&5/|^*oe:r&3]T?wH\|^OX[e?eus;DX5Rj> J[0x1NkbnPwn7AOwψ/f» 9rq@,ܑVP3u؆aF¸c9t FS_+iӵsjӸ1sjӨۨMⴋ|B]z4Ktulc .%?u9Kt% }X*܉%zJͅrL .qvCk%hFDʓfUu ]P-%G[&ύRs')M( 3I RQ$-̜,nc⸻FΎ2m*AEfƀLgŒ gBi˴0E࢏- F.,1{|}rK2n傜## CDy ?/a) rMVE$츄߮ˡ)ДQЍ %+6argTYkO6ᴤ 4Bj- 7Nd+V(t9pq@7s6K1/LSΰYNjdc Qf\lXs`r0yi,7Y8fBX=@B2fĘ%aRSy~tiPq\_mo$BVJqk5TcMQI`L*0?m!KmGkeieZƒ6Z0.a%ך 1p {W0&ݯ*&͸,ɴJMr <&j["ˠ2Z0,3cWS$%QqgiY"3b& ;Z|)'$VSvh(r+egc1: ֋T:ApmEM}Ua^ƵL%=sgeܐe#@jHqU]Y^M /(U`92OY}* =Yioe&i uHNX UWoW~ pF_!*$HWPZ:Ii&^c{eq>u":ȥC4eYylghQѧ=|5өT[i V86#*h5]7"  ~!H-mT"DƔʬ%CL 'Y kTw c_m`ZRz6Y#ۦ&L+WC:A%eT)S!_C!wWYpS L#m[iZ (Voɛx)!@lⲻ X%뮊m7WEA4L7*EmFw18Pd6e5 ަyմD{ZѲ5DDZ0l ]dLT{B+7mVv57 `(4 ۜ\<͑ Q M2Hn: n@y(Szt"0;M MRڷ*O6CL@m)(դvĮ7cRbY%k9f~Y~CbAp XmUQu ΌM}}#&0#aNe˟{NsNj潇e8 @7f G:]5`)BYrm+JK ^ LFY7&.ͤ}ؼ0YKM;r'4ގa;g% ][[zGd_g@lj=Ğ݉ 1xȁr#VC(PeYvW&-,E>or} D*\{P7`zxm,'u)Ec>CZJ[%\0%.qiPi m2eWKl&v-S=ŎQc =:ʌ:cO<:0\x HSNqO+ڔiu6Lc~iW]BƘ+tS0$V'4"amhȓnRKDl.\+U.a0.dyS:Huӯ6Cr"/r 73|18 :^pNjUTjZPT#uы0G:%աI̻qm*:1h!)nE]G埫qmNիSfwϗo >OiOˠmCWv-S4lOW; q:Uw ;A(tx)@N <9kH8 (9͈.U^Qvŝbg,{VG냛;AJ QX5\"$ ߏ dϸZQ5j"v$v Bu+@EtwߺWWG=ǕG=G];hcA4j4* B"$rf^#熖_ʦHΪwiǧ;+2<4~t{I柞}AҐsrBCUhȹ7f7I]ʥcjx3)r<i%x~ gf(_fg/Y(!/?(g_*5M1kni'gegMfzF=I;C?š憏VZb_m-X!n ڟCB9t,yZ1CZN%뉂l` x㼐EnEio¤_AphQ+yW(ީ)+q:|=|rBÂdWaMQ#>^Fakae+ϟ>쵠x*ց ;(GF =8~()}48M!wuNKħw)Z|jAeƖZd"QZDVk=lL]U*F(3x G=9{zS4|+V+*}eဥAgTn?Gכ:1 ɮKY+Xm땒w"%T_%WTS,>^*QY;7^6U}*Wgѻ t>wg)uzhwB޹nS6$?'ekOԭgbZ=t5 AR u5tn@\AC@-'o}o'g.wI!{}Q ^ɋW(5c>)@8Jz:*VY'}%Z>hΠK=e-gԫoGՏ~[6u>4$*ObkmmQua(|Vk|@C#Jc]m]j ZvPn{~k{bj(o/?oQ \W{¶>{5̗vkqVbD9\h?;@fawz]# XC iwiث'bCѰwҨsRb*TnϐTCL֮^kk"L?PC_}e3K|u:?|bmeA$hA E*sQ?"Ȏj NìDA){XYn=yk2LYVvv(OqYNdBժh?e$UzF^WLZJV23% m( zJʼN(a3g]n"UBe^C>|g;#r4l5<^0ȍNrRPy"HZ"w2E>v}D'b;3>K5 0-w&CT(`9LN/r+4sX 0#z)>}_<)0^0yܼ,h 혁}]ܚvY$(+,s7ϬE+/ל)AZVv[9U/)QU;ޗ}>_t)oQ cz)Eoa"n vj&G>Rr10Rw^6Ώ\J8dSTYe XJyn3"ujZ17llha!VvQ7.SR.PbxC> ΕE`B]a^| &ܟ5/QJhy+ 8L{Ya8Xa5K( Z}Y.p9> nN= ][Vf,XO ԣbv@V`83љBPLh6@14u)^;!ޛsVS=m,&r Aw[ G&e</hh({*+eL(֧K^ @:@[:.現 GQgyy|y@,ODIJٻ޶dW ͥku=\̵e%W[g0VRBQ`ĖxxZ4ɗp,HXYoЀ ɉ Mdb\G-.{T@])GZk+X㏤k[!e-T ZsDdzKXza_R'$ACU@,ji'sbN/@ `Kɝ" J}pV32iS4J=ȳFH-Pn*4Mȭ6܁*MܚbJnkV 4)^/|Jr;.lӣp;׷Vo"*?޾|\{`Z5? >3`Bar F?zɝi$WL"I8/hrFg4s(h@"h|JV!)R vVmZl9Zzd)0Lt%)P=Pey#gh%)6ւAmKg,i,#vV83ã| rAHZVhiԑ{cJ% R(]UpK*h)Ҕ!h`Ah/IJ akbh!`J$ uZqa "%0킠oV+>QY% zO@(O##WܗH25h@5.;qK9K0)ŀҳfHTd1i_G=sBj<ǂ-s|3`Q<{j=-FK3`3`GDPg-K%_]͗=k  :d~Q GA (V٬2s}+}ؿffһ9|/'l] 5KLG\Ga=\2`;fvc'} Eo[0tGp2Pbϸ-NQ5#8%{ΪȋѾ ns?2Pϖ&{rixe& 뷹^os \OFG6pTJU'u^毴aT!J `]AA'S\0v5mL- 7lz[:NiT_m^U.{=t#Ϧ^goo9GppO}e}9rA&=|lClCrˏў>~jF9ZgF͝D < x|1|磖QV(>I| ح-5X-/%:ܱMX99zkX˹m{ШYe0.uD4 O:hdC3;fB,x$grHԩ6[2-i0l)؁+!0FC>0W@aS=~|e욌}Vm"\WSfn{%lpQ~e^$gȹdÅ73ʓ-ʢ1ؾq8b8(ك]Dw5?_x'jFnwS\$rѝ}3Us=mǘS'*<\itG `B?*rOi>ҭn+Kzw7kUݠ,b*ZΥ'ݗ\g5V=Iȟ\D[TھFWfjN1h]o>ҢPu!!rm%SB 5>wd9;PI ] /{B_ӥ _mq][>^2%( PiXϘnPeH*;)B\h2DAoz#8x8G5|oWRJts9}ΥF3)Rʹԝ\*Pޑ{![C );KДhu:Dokg m]"@`^1j)"D6D(z/;E̖kw^nora xq.YQ 4r5Kb P4 66,}sʷEw% S<(':4&hVPRm'emk`XEe"v,10~k>R:`65|O|:gNMuo?tI&T߻%2x'_l-j-_^5^)c[v8s7N5 fKo>k~*!T_yןy<cy60:c~7*1/ \ӵ#\PZ iƌ^Xb&!-,pLs*SDP+z|#tXUw S$/%\9gk$5+yjZؽ>6Y/& Kd|Y+9^e. 
4vSd/(g*dD9a%t9}CT(.vEv;l*)t>R5Q6IZ)zkvGes'L:}^-^jFXk5Y1r]?M"rq҆SEѽ?oXfK/=\Z{NFW5qT)5PimFf{vpV#\Db}mBC+bjXx &%IwcНEԒlGOϵC!G'|CٞNNj~0_}Y9酝6')!7׿\nO6\*]ݴ:'Hmv+nFdE ?&zv~[]ÍqÜq+4g4:_A8L$#S9HI' }uAzMoh`ZѪ.UW`F}P , ;&`lpAPjy D۠#i춧Qkz2$$0,2OT =1tB2mTmR>w\oxVzi$)G{RpT7J>ALϖ@^RS _1$B\nC(]<= sO3Z?}Q߲o>L&.xBشzk\ѠQ.Oƌ06գo7coo+s{w R7+u Gv:nn~B ]zG80 o//]ybއ7kއ" x%y!m@p/F6ׯ .89?βa0zvd\]Bm~2?-xm=AטҺhΧSM&e&?Q^\+ʕ/[翵!h.n~#~-ux#7߂qӫl,hDGvܓABuZV(6EPLxPhs9x׺S<#@FT'ks>;s 8@OjCrD&: 6ԕ+J{I}Zr_oqy٨(@eпCWJ*jVWʆܖ%}.WEkm_r3lWMl/~KiI(ܭ {(=]wgz8_!'> @6d%WYRHJN~IJ"hfȡ eq8S}u=kxx POTmsj|B#L߷r&Dc|knLnJo|KQZ?Džk~\Z"Kx g w`1,[0!XihL=w"+=̗qGQEs)k@L aYʬP%-11|N(oogf"R9 P? =/㯆{ 4Ūbe_]f.OD VzVjԍ s!wK~bdYչ6Rh,O()ֶ0`C@Ј"ϊ?tdr"4FP[z0%7=x*4%%6G+%Rև$'aNzdj_/e~~E͆g8QRbJnDdeL*i_k IYZ_/;FY-xIȪɚtn/{ p))kaz wkSS/u*TYOgwTXΩ7Uc,TWb.ډAi&7L"SP9 2Rc¢.`B+ DcKI(.T(a'OP\8Yxf{p',V֯vﭳņg|.C_bf߃#B"V q~ H8^!ܲ'3xiG(,kYUj6o:4$)O7WVj4G" v*9zE鰩-qnE CXݝ0W/4;nqqŹ'h rqe;Ǽ# Q]kpfZC8~\aLB+9NVNtzCs uݝ~ooB@|#\ |֩C=nFэ1>N9p?pᓙ^~WJ$%kd b\ Mb])~:EՕ^9o6@iۼny2ASOqn鲝O-=G1:L$ɞɛ<‰yE=|s8tiZ|sIk';.fk-8h~o@M$SrCRk%&)m3 1"ccؠd)q %a!uh4 i4cB}МCQ‘ZAJ0<3#:)-1㡭*U.2#@PR0цR(Jj3AFkFMǯ7sE87}2*SjuaiH/%9AI'<*BlyގozY.F?ijY"mH/ffG2 4ȍ Ω 4 `_^qE-, ոjq݇Z!Rz%OMfz"RtnLzHnƷWS>R4Y fzu?7.}t&k~t_^b9Is1ysGNԦf" ?g@LD +-^(`hn5 3bSu"zu#֤ӿsrWׂ+|s0sζo47R]!"Ąq,h(d&pl)ju$L0| }"p fTj"q5k0Y/'-ZZ㏖]EVbӣ(.e`jRXb;3*61+DCõ꽿v~B\U~nKP26@bFpLfT[H0%;d @3ҵxoyp:e-֠ͦ@m5<慯C8&МW1/[ _I:ت+}nJTM;j`yWU݀j.xt|qLUUAj8nh̊óXz5L. ™*)p7ߌ 5Lx@eBKHZJ_<}ӒKQ%cx^vKcD%^3C0BzlQG88L5xWEGCLQܼ&n0a\x[LPmq^dBFNRs3MZ'9y*PH^"y%.^\h 1Q0$P, B>Z-n*R#GmR >Vu݇Zv{nfxSw/Δda6? &Vx'gTGU\a,vsR0#tPD!ceD]u!ar7Xwcn7w;ŷa qtz;9;=`yN0g1zޘ?5wӫ`tQ}z^Y̰7u KJ&ѫyԦdJ+-^QOD[YxR7BA )"7i9kU g38@,9ZFPg42;k Tq !t`G/ *E+4 {!l u"o !ގDV"LX J$f"b+Yp SޏP` VA#(qcL B3A1#%B0 U)= Ү6 -sK-8ir; pZ)NTq̕AKLa*VO8xu&r5g|"Аm,p"%RBIf)8(ᡵPXts}d\EI^)%-9)ca8d'Hi GG HT5FʶT%^AfRRd999Xl1{!lhF!"DFp ! kx >%aÇGa*z'Db< vhfN'n4 Gא%ׂفk_Z1v$f 8f'Z*pNR3l$?wTb)&DF: 4٩k7yHjԛfQ2! iӀA$\R':F0yF|M6LE&jʰjk<3hDat<*@UNd2y6-K/G$1* 4*UZQkrJI::q<)J\`;I7N`ϰ9D,Q@Na8o]M tᐘUɰ!e|u6暥U"W \#kʊﺛ Zpa]GvZ1:Р뵮tAQm%)kh.Rt^]{p\wS̖C3RԩÔn4xGJPǟpz&/ևe(⍶Oe:(k6wo՞_];͖C iĺ^NK"7߹xY&+0cQv׈n&?m5ĄOb0Kq oi&1Jgg2U6{#p)o7[MyT0}%otw5&5_]mKfDEnIF!n 0u58Bеb{u3錙;E5G᠇yO&S7!a<}nC624]PI4{(}_'zc^\&Lg@?m͏iwֆhBj{dux*˜G,Wզvtyo}c6T>o%*ȢܓK+-J GVxVKfѻ*n߿{&$ZE%1`T`֙X:N@x"Mg~Z4@YJQN  Arn<3FKcFyP"r^JnR;J"Κ[хKo0]1Rij5Df肈DZ&FVԤnF^T LKuO{"$'ѝιk%C _JhJG*d2q}BR 8o/b2Hj#RPѹE?^C6jGέJ(cFm ^a5VQ,lyq1eW C27(QB /-'bŶ`^"Nk-@u%A4uG*(IZ/PeYJ*/ׯ l']zI֞=ӿ|ͷдc%}mdU+U[5_BIsqv JT_&aޚq MYZFβOJk jgP&_|G<)x{*=:P6sjSe tiM^uWچ(#Ĭ(U˒fnX%л>IXSLIa@Z䅡!()T :(.tJd02LPÆVJ= [)>Wj"Z喿Bq6Zt8}rP`Zo;@Lig#ޮ+@GO_N(i;t4Jka( "%)Pn/iPs);9H & b}0w`(~šW$\ %ZQ}a`f`^Y0q$jyьz+g]߮܆@̫K}lG51+F(h2 ?͏QpX,fƸ¦D&a}~L[2\RQ.= t%xZV d[ȈU^.O! vlNڂ2!D{ȫmOIFY5;0 u~2LyLN{%OՒW2pi٨5fߍQ@], *Ff& ۜU05$MaE~$Q:u@"h%0-JЅd M״ J Y&L_iڍ f^Vwe@vl |B\jjwmo$DWBBW;|ϧO=fȻDd|ӯi,q!LdU\$"YUbq4o g<1J{w:^6"P1WE}#+=Ϗ3ߏ>ζlX>ǙH>hQ= MMgM?U*4~VI4e%z~s "#ZGz%{jl`b}PцUSe 9)4g"-&3"ܜ(l% qHJmjZi`9ya/*}XDYM.OhEvߠ 5*&{}bܼ3=F]3?'A`'={tTJv2va2n#)G+YƯdltWzMi!q)͉Gw7}wx B|$Sp )GV8SjmDCS\hߟtB(k7ػ2? D\Z\ ( |g呷@fڍ/ ꄋ -Fr*Zfw枷3 Ƭ>ؙ;#;?9-5;G&t9fš-8~FI1ܮYyqBO>-n^!.utmHE] *zl@I7JǓN*|3u|Z#C'+aԡ{ͽ-5ޔ׊.V C ԲFI1oRvE'+L1)Qr ;zs='}! BT{) /z"KŽ!fכ^o՜b3j=*[6rܲM W4|_ mA671@Pζ @CgCVBT/aҥ@ [ %Vm9W$HiI(p)b\JYTk5@0¼$-?/+P;fQ;}' xOz}4ϟ]sٲ[ȈzȻGYcW7l@xfx ~X99u9Mk)A]3LmBg ܆x ˕g&KIT5;*.Y핋ʒ2Y%냣{kdܧReDN@KbF籇EMs{X D"(g.ZҁM3Xt(^\Iq{\SӲx t.k GٖBzi^fCm6[z7V@"LU&bwPS,U$ +dV+ΧVĿb3x?~gcEdV"# 'Bs0hDMԫ:ToJVix(?9Οdsjpż[sZ<LRh30&-[I*yc׽C]A8i6Q!;{} {'嬖*'֫bì:{%K*6J8՞k't趞l%SgA* ;)@HBz?DQːl$hGZz{y. 
Gn4ٻ/ʽ[<]b](zx/Q5Ly2m& dt06{ \JH;q () ݵt!J)-j9#❧bvE$u3ɺ87UdNF["58jGC7?yz_ :~nZ :#BzZ GIY׊4^{SKNp2_Q-`ʕVZfeJTF%]N2U뜴6BBqy;_B`fqRSŽT l"YTz.5z()eF< BDWhWZ(bЂ< Jg &Q!гhVӟ(1`?{jԁCj:} 'v& Пdk;šG&GLʘ&/D5!Jx*mPhRk3hr2 *0L,' <  B@5=il`rE;2@0'(him(*>R:Q")4=%8íG6R6AKj2BXD㢍֌0K3$Y|uúC#jV֪so/g-UTeE>7rE`4OK10)܉ܞx>2`1Ycޯ<*f4}ƜBm>BM;&J!n=<割4i煹2^pFf;f |O3.yeVK+/&kVR܆W [S rNgԑn4ߵtk^)X!o|)9IM'JnLΩ_uy85?2!TL>{5/">=;^M(t> 7 H驸ݓd8RvǜoW;f]W cEDyWA(-qlMw2Һ ](`|AۡX=ׯ͠sCd_8-]8;Ze]o/;ㄯH2#{f2ʈQͥi^Cޗ6yQ y31>$9n Ovi} =Kn#fX,'Xg7jҿ|] [%w)e i }g: [?)&JݻFo=ky@һ,QJ A3<wݖ)I ՞p*dYR)l*ʹnh)ӻ1KA涍(s*TVF 4-ٞFY |iE=˩CՂNzPq9j%SRqŴ3[ADNKUDdJ`FjȻՠY1u]_"UZ*5wM kp9 TzuMM7#i##MXҡRZote$?{WFr o6\$~ZV<[a\:*vwbU64bVqdfD8KP .!ܚ`PM&ޥ<轣ZsbQZaj 7E*h*hu#fK&cޝCS1nIUF"C}PueUl r41iLzS/-iLo (2rj+*VaGZ˖'_-n䫌|}-/bT 5#ƒz\m*U뼃*󱠱cD (QRRnx6LZEa%ToARinGm+=(j.EV5+6azRk̗+< P B6Ra%Brp8-w[' 8pDiRf(n"SZ'/4K6[> gkKn8nT Ι\(,@K%Mdx_tBnL2A0ժlc#TzQg0!N$1o q P ^HO!ZS%iTR:Z)jQ\4wOc0M@#-g`"T+JֲJ vXޤ$󿦩tPP}OG\RypѓaYI4s2(@l8Q+ ~,rh1,r C"N_/c֛G;6SkP F= _? {s7m /Qp@!.f|hvk|QQlgUƽK=6)ssf6:p!M T<>fG-0b9H-FЊ}"'T )2OzhTBPrk ZHW߶?NT݆i\QsLXԙb?0>!u.U'"lv^{vJipT. UL#ɞ9!rpv!x4k2!Cえޚ|'x`c8V,TjI&&[k! HSrk^k{Ԃ=f&Aw,F?~_M0WkԞpa0߇yNd;RE3h=U xVdxXA)PiڎaC\=倃;!,O D.HXBdL6 ā& <+y4"h><E5Sp!#A\"D.zĚqq5XZH٩tMqG':$NhAaFx^0*\̍]^uF[ R ()@bqT)JRx-s( q[CUH*hŔ-S_;5dB="{*pT5[zABy4hKn__(^ fD=L\1Ŏ;dϯ^rfq0)/Yb}/:=Һɢ\ax ?OV4Dvk/ݤlԋ$>?xQ^ylf>g  +n5{1sf<(jǃSн ,UhIE=gb? !;}z#V#w0,ەJ~; ƋFL1qPAH).hopN_t\ont_-QEԽV񤳇A&|tHQ?JJ^M۠?|!ld[w}KRav8cV̭;qѐ+I>qKJkI]̺TVRc}ZB``ƟG;P?0-v\*ixMj;xkt"{3\ FOlA/b /f'(Lq5G\{<\{>=.PNgKCQᬺ B4٩W7f>!\7NKwH\nNy x|_ {3,h).IKo'~r6=|L jVstf9:!,LA@6w:ޖtJ5`K8f?D - mhh[[)a[Q+&TF$𵚯טR-f}԰*gٶ6 PxM @ZS!GVDzwE"-vd'52|+ ,kHG±Ua;ͪWƨk1< =geY23@q :ig{[y|B>.HB?݁6WyxiZ}]֢\wI,ʍGϰ(7h8 իUY5ɹ%bm6v9w˦AE}NHpN1ѼLNW%& |]~]5x7/WUh3J,RDG J" Jn7D{j3p-> O_>5n7x%&1RC Cklj^x`$ҔɣCB^qO(~kF C4QZ.q`}_HjNxtH衔sf@`R9ɫ1PY;)䱟ގgaD 1f rnX'o-A \rj >6 W3krA(A!D 3*,!J(e֞p M-&WVb (*Z0/Sa<%bZ>+bwkք)sRmQ$j!s&I5rCJFWmX2"V}ӀjAd:N:q?93URC&vIDG|<ՂQ&Ner(M5G|wzj]8ӕrb֋}oizJ= O7ӉK?ɗSXnhb4bQ  '`YX<lE4m1<\fO˫z0@S`^;p0Z|) 5}6s8t k(~*;&klfw]ygθ~xc}o{{K%}J}}6L}OeYe>}H/P~Y6(:^opN$}OBQv\_'4x/~Y03uSD_pJ×'mM.hMT!ȹ};-$:,oD8\j,ߎ.UŲ[k,~q%xw*Nfn:gm*!^y:^ԫTQخG2+ Kbۭ Oyխ^SRyx4YV(*P&tٓU V+hmu[:Ek"!_;hڪn=9❡ mE\v|*1@ zH~TvĎJkMHWyu7!փ*G.@/n/>}e/owoG'L>! "Zf07͒ mڄ`20XRs;??{W᧙# =n [%v{0}.DII/t}YsT)O'^KF?;`-i JЊ~I &H` f߫; ni΁]2O S(zB >o+?xM&]oغynzءzy***n/,YQdJ+I.VB "jnlpUx]s&%7ݭ2}$ܞ_fͤ,%Tc4UTuYB qI!Xa4'΅N4d"Q(5b"vNhpBHTRV0IZF#E- "F5nٳW'ts( Yl2TiASlo%h{^x|cҐxmۅ{ NSZcԮ2aY'qXScQ+wl꩕Ni@:SƖ%Iҽt@mW=+Mf6ӯͦ[J( ؛yA)Jx( ͑H ޝ$$1m=hTsVDu1yZ/k1iE鼼^.s齓VVgv"}q$΋(S CK) Q3ó)FHyӃ/j1 E` Ґ]z]ZsGWɱ^))?Œ/)wTyA{%)/ v^S"~eq N[Q K xẓ@"Lˬy/k1eEĜMX^0WĚ"Z%s=p}2wH W2zTϸ͂G c)!uEICfc#( 9 HkQѼ7"U#< +yoRJOi 17dV#$$9: zZOcbnv0oo8FJI%Bh0w"iV1|"IrVf)\6l=~+>r ˏ`H)%\.7U )4KǀW [SǔLf-Na CSR#aq( 4I|LLDe)F8zԡq#&&20Qb_展TsSճpbHD1b"x^`d|LpFQWpIGARu:=G6AhjZX'tFcKcP!g]xVTw*kQgReLA`-AQQbT hDA TY&5.pY? 
057 c\>+g7/+- S_f![\R]2pV2u~5b@+`dt*J5E3@fY\;+ Sc`,;$I-$=sÉ*䤽vEz*G5 R{R; (6!ZKaDFZTnC^~-3F9`왰 F[(nX{/JΘxlbz{Jè^%yԡ5$#ƥFHȝЛ[6Q^>6{HP8PW "}<wM -/MNe<>Q/]R vJh,&@K79$4`H~~|gv]l&%8,gH&'~ h^sMcE*\qq'L[ /u;nZ)yvLmV p4>w\bE o='RW/0h<[pޜfb-UXR0DKXb\\r&NǗB%!v8ڝZ; bh,V1ul~䩘"rHys{ioUR,H䠖T Y"#D19ӓ&/V<[q j68:F|DvRK\`zdv@%W|ǃ̇j; NXUa`VaUy)gE!eV‹Tʈ1anRsdF]t.z2aS2Psۧ{E>ʑPssZ˸2HsJ"s*ErDUxv0p] O/88d&a0㜡JEؤLVp}8 Nɔ`s%u(#g 1_3Ꜵ .L^4?؛Qz`LeU*z L}!iUo9:H~׏?==SL4mG==|1Qq1|yՇ5_/.-|BD$$O9GbZ?le?.WgU?OG`j~` HK9>Z]\](m c{>OGb-?^]UpǰKA8>;]g?zު*Qo Ui+^Tn&vLR,6I\Ǐ*Be @ CJuNSۣgp~8QGA .2M$sEXShd3fYMpGtX< d)7ËomWب'ӿ>5k9B)jO.ϟ~Exc kJ,coឩC)CIS{LjR(wXI3ӜE\#Sx^0};CjI^Nbj,FA Θ,;< $VqF)#J*خU8HYh]:WwY1-@!95VAtRjœrT ,'6^Rj#KDiJU%k'`O wdb#!3ЊwkiDJ.Q0w&P(޹PR>TK;0 ,VEJ<3HGu3&IҜ8ld'5x"[R$ǫ)Dz}'۫&k_,Io`mưѨ}>=VEY^&vJ//Uzn(;}5U=`7jESD5wsLu3z]jfsʧ:>`=ù'׋URjR7`#;E׋ozʊxzf8[ˬlpZoA{@tyn{zAMuwoIrw?vc%_EPvG`Jꓔ"諸ETEkm;G ДF"݇p%>nV$GG;Z~#b+j2;[ I*W@jË@:-3*$چ;ǧ^-QBs0԰>>S+J'VKo 1 .;C>8mVOVijc\T*)αCZL7h̉s+ u(,5G$ =G郻bMBpl = 8M>=Z?tl̸қ>Ϣp&KO废GBH$.pd,_|s>cyT7ay}2EX'SvDF$Z;Q~+عo?1)3$e%#3Cv?|q4_Z >{ٗSk([l5::tNmPF,Ҍ*n 6¥m0<2Ԕ5ayfu䈩Ҷ6vvK x*vyjM͓7 ÛHGuJmیgB$030˅5,FdLi]j^\Saq% _mHE ^ȷ7G˛ͯQ=EHp'!"4EedXUT^3]z,WlU/+c '#pU0Jv2A#+,~W>\|A;"{pYᕨb&)zHC_U׫k.VVg) 4QFX 6BHAV&E/p#o"i@+wL(V$8sE %h4s)Py1njdRBVfH`[@7KR9R&>Ϭ s$5ȁRZ1/I8E&i )Y k rNJK9hƓE;U"$'HVq-yP \oxO!$y&lN^Mhy~,n !~oc@->{/{|gw0Ź)kəpK[^J]e(5N$ah(ü1{Qy%:ON67}X n]]/ OJ'߲ELp\.BZui{w?銏re\qټcpu7VgN .HgZ>٩$3ɜeYqS|/f|`[gɺdrh~H~Ù"}qNQg߮Ϯb,BGo<|IG={QT+f5g5J@qDf\R3s٬ij)RD%\[zFyk9q!z` }?K#bKqͽ|gmVɗfU3Eq1)LGդco#Kkc>z52ȡӚޘ5Xrي)Fb;N|'(Jw#ʎT-NJ.rR䕁`!0<sD$7&Z55|U+Z(5Zq)~/'b`%/s8kPjw1c} R'0Y Qglݒ*ve۫Ưh8T%a6wj! N%g)9:FEnD'9 79^seI> =N666 KQ.w~"Mb V&0}%8 ۡѩY^>UpZ$U;\M*Z*k%3C.IrO }d̗K`E2>!H(3]ݖ? _Iv[,#i[1}%%n8;EP]d#,̌zɞӂ8sČ\,1T Y:If ,pKe'ς&U̓dS& y HPQކΒLb|Pֈ$wjHe&Bƕ%ZZj<,L0FI@blh/e[0v1Jaz3[zD5fLRIQ+gMUЦv Sx{Q 6zT W-lzܫZhPikEF%j( h6[RD?JKY^$ϗ-EZZ9M| ? ޝq~]:=}C,pA]ٷ|GcY\}}]w?=Wl>dMarj!9lݏVfo%*U%-Ynz3%n<*)tt. ΒDAE0 |]T&5UCH'% 9 &Y9*"§(&)u!gF#w}L;"k#g|h!#'?1ȠH lr>)vdBYFs DKyPɔ$jɒNHD >FXJ]pIIYq]t-9W21pIT&W 3Ӛ<~ }/ vf|2hpRQ0 >IE =D4 j(.EL gVvCmdQ!L'GIX[rk Z@nhRJsCzQΉbg,Ҕ(ezy%Yje[)`Kw<Ȁ(%و1fd@) (3+IO^()Ke#]EicK2vG9JYj|thQ"1%>)[oh|`ڶ`ۛ现/Ur\ qlv_nVxoo-<{X9^Orv~`zX??ߴ_?dW8W֒,Q$PpqW_Ζ/*id/䣭SLJ_nOW1|7x=ؖ`llLˎ-/nnMߢѲU_%hW$C^m>zrNA mpMA}C,60oVW.yѶLJw珷cNveG}Wn27/:%ʭ "B<,{m 6 C]hK6p&RC˦Rb=zGL͆wvjAF0_ITӉ ɥT؎][YXwF헛MQBk _=}ekKܔBTkήg[W^Gq^N;۳ާRUxY /KUey E#ftHa4[S]om!Ŷݤ xށQo2ʒi\PTU1[T7Nꖋ6tٯN_~UFW͂DTG2yAJM9W!F1;e8!B|dUN!*وy-K Aq̶RP"J9qb̀i/Pt8-JJ0c4њ Y*1J#do1D#n=S `L|#xomB]40)Oئ ~ p -Zcn݀vԪ޳ۚTNJ#=nqM_O90-%XIy}^ʖFCIH idYH)knRv I1g#MY8N+<L>m}8tI9´VV㍴VݴV'ԓlwþ =8bˍ-eΙĺxxiF7%>r|6[Q{A.1bp 0Q;#8a.vMj3 S Rm\jGeoR0fߔs]@u3;IgeNȓwA|ˆтL>1tNZ2ZL*|/B8սi#F~㾪1{3In/ꆤI\ó?.Ј=xPU=7ػ= Nu=J}H`ˬV6Nz\I&߾qD8 Y4Ε7"xRܑ3&Ϩ8#fkԑgmrٹߡZЋmu\(1z&iQSܠ0'il bKTWy?>-aH&";uwvmHןc]UhCNDŽ}Eu[7MSIta!Xi&E(7pR!x.~򮘔wv}&Ǖ>=Sx5uThvYHM,V!=%AHL V1OT4>LJ> zϮ#ǽ- ]Vc/2N h1I6d( >};Lf喔s"ٞo; TQjn"Vw{e!j!0Ƙ 4I̮=a>Dh/?jx KBZ=8B Af*K[РYH,%! 
F '.0dfV ۿ@%[G[sMM#`)JskR9ci- UCzEsnh%za_N`+03  a*%.f]oGW}Y aN݋/n E |q_"P<8$~]U]-d];"FS[%+"Gf2 T4TuVby9*~K w~)ۥZYS=ܥvd.?ap=?E)Lov'.h:U(蕀L5kMUfvc)z j-o'$Ɗ;nZf@Kn1P1=(Ԧhpws`C9ZlfnQ kɘ>Ԛ ^I`lx7~]YUD%KoHSMP= Ke .+ḥXNʺHi|&VS%$ԷNNԙXUD%;h5*2#.W+S4qqiQJ6[r7dQU4O>׺ND21Hk Fby4*3#L_:rwK6jRT0:nA!AȹjްCU9$ȁkV&lDU LكʼukIUeΨ052ҕ]&ƩwW STW2W]q0%5rխmTuFU5D-C[:"5_?rvuB&{`<%gq.&qR6] XGC>WTLok/E\@TD4-uJ&BZi߇úrLNƑVV{<7 u(最:H(GlOc޺'7[kd1\~>]珳>k$fvO4 P>oc&YKfԛK{1fp2R^%AʆLJ3[7v:W/C-޺|&dcR Y)Pi!fT2 N1nr7ޫY!'xX!}A֦91ߪ9\gk+p>EwU쿻 5bjB*dY5iҮ}F&~ۻ?Fũ_.!u: 3F,.|^ůn3AT$Xc.:el)dC`RTt4_d c~6kƒIh=5̓JOH}R8ڣTX/gPQ\4+.N;`gܧ}9]Zt()s!Z|w/G*\ 2 eO:^$ȗS]f_oZM,Ժ9Z(n5pAB[Kgrث@T`ۥgiGތ F>9оz::9೙FxzS DƮǧoܰ>bZ02>G޽} Ne͛ yx 9"ъ;|u2ǟ\p:E:[C3A*_1ú8M#3*uUH34Y4Yab cE_nK)ᵼ, R+H)tFJuo]UHgT :e{P5~6z XtفnPX*_TUL5y9PUFՠ/ZTaZiASf1A*1&[bl3%D)k0&ԌiëBKRZGz)1JIPXH!BSN(51gNHV-Ww#3.J7|s{uRr4^ $%B)T|VxU^!34J68xq2\57mm}1 j yomԝ({RЏ9 eQ9UOZ`)n8&c q]Φ_͇P>{?Z ˿z70L~1S8kk,ZVFPd3cδ$bH'FJ&J-)bJg3yo~Zr5*B?Შ9ԝ\b]YzZWVGoDěb.ݽp` BhSO)~N |7QD'˨DoXXEc^,6j\`uT _k5ԾS[`ٙhڑ,Y!8w?x\!Z򂵨}_`Hݥ.[-%"7HLdE.; !YK VIcDAREK)xQ=" v!y(T"jHa1%)x^+daF0+~G2h)xc؝N$Z`l˹H"rN(Vǔz %t8' '.r&VYAӍZ'4ԣ-yhki`m- ¢ N~'`I&{Y?+D^eB _ۓ!L2Kp< X%ewa1\^ja`кQ Ru'do 6۬^vbBհ !'d)*X# &C{ |H5ut:I;&۝tC>|eٛ߻̠_0<5V,. {mKo,"7#g̠&VBf~s5l0N VcYYyT4M.8jYqC]-L !/- z;蕊x0ӌHI3%pIi>A9 hIŃi%abd28NNpX:H\)Trk+cԹZB)oʄd:9wcb %B>6DƌCC#í7P -Iv>njwLNƑVV{<7 cQs# G 52܅[W4f*{?.?yU8]f q㉙&Fxc~5G3co%ׁ็(fҺ![Bf; ?& OzdvЦ:4~Xa 6_RNW'@YBI UR`iڀNnb¶ea 5\.pfLe)Ń|K?XW*ǂ}PRX&{7q vΒR.8i_d2P%`K6 >P {jO˃pf%v+Vd8઴.N I >AɞT䛙[bx.`g^ڽ.*h ֧U~w_ ފ7F)#Pti[9Y.0!d߈YB/_i@UܬA/GȺi8Rj9a۫_xn>W}]RؖyUT~Ux'%hrˡ D8ݾ|uYQSߌU(lZm6chOjE1yWN $;&'vHх)\A2ɸhp&PN qVz.AL ^4L.mjifЦkA jLQ]魏1n6Om!mQDkc7V3a]9Dh1TlM 1q kJH\ƞc'D[-M18f&ZlS C%y99f/+b ˱0xc'fDX F?{ȍ4,i`v 2K^os2?Ŗlnl/g=RwbHVէ &DAhMSAEB~%m?IGr\oґ>ܖ}u!mi*j`}2 ]+ISK%kf# r.Zzp696btAFOjZHiFQ$g*Lҡd:1:z% ac[!|k'"#2&?LiًR%4S 3&NC*ͭmiozޜb Yp \rʃ֌/'1s'TJ~%0uXe5M]^<;-fsB9<DZ|/u20 Xvn`guqEݥP,FFwd8I-T٧ɍ;zH !؊Ze(.o-{ 6~Cju}XhS=T)F@Ō:CuZ k9DS\:AIt2n^] lXǘ7@kP&Xk]Q6 *DFbj7JFG dj8/I}l-#4Oh|¥}DexfySd4&wb/`+Yy-S0Jwn lRJ%DRo̺Kʵiㄋn!"0žBV8'+:نRζ۳Oٚ^+o*nXes|הi7{{K0rϷSzؒۇKW׫6XnZ쪛uລ%9ӟU3o S>_;٪rMB^Tck2К#zP \L'>m6ZYC-Gwa!DClMt߻ f[(.67괞Hзw o+bX+7]c)"$v8ڙ#:L+]w,䕛hw2Fc\4vg.?[[W_gIIQ fr.4sk{l#bP Te϶Tֺ4<6o cN?ӖdkB#wL;ԩ9Ьd3>PJxC.`@ʖݼ~g"mMVZ9. &:Jqnst'[1mE 9V;QHeF,2O]Ц't_},bIMjK2J{KH f_vpJqk.>?}j\eGTh?Ȩ~Y /[]4~Yv@Pێ_¶瓑Q _E 0B:P3b{W,JZm6 @_g<"=YX36nyacs!ɬ2Ê X\gӏX%j_E]Zui-nQ'GXSB W+H*)aX4R򂋔ZZm&ET)dˡ/:m҇8{%0ڤYO΃_lFTr% 6dnRXtrGKhJL!V(DłKR')XFZˏS̰$XSV3hx&MXIyRTsn ))XtDK. 
(2?Ja婦JSA 6KJ8 HɅ(2gh)GՂZ*QRf%!R@I..8і97JJ3]@)eE J'8,QBB̘4)8P#!$K+Y43I Ohҍh Nw,-lVmY6)o|Z?jl򢷉>6u?$;\gZ~\\O {Mc[F'cwfT}NDT'jkGfÆ00sCSfXXK5{?[Ph<|}3>f/p6zb|ڪ͞cY>:tJ~xKZK(ήTbrf(OB~z{F sBs3_ps|߬?yVaLI@)'ɂ l}C76=e8ukL!]=gh^ۺ{A@A _A A Q&~Y=Z(Ӂ**O'.4>F#V!t%iTrױ.<0n֤^-]_j1<ԘGXm(~TC h zޠ"e7L'2E)xLI,to88I5 -cG#.|*NfjTOllݶ"[n_ӈ3ih51m=ͭ~bpmm_=4цX}bVE=jz|q fh/ Bx˷Ӄ+YJ\2 JLjog'" <# hcg9TfM^AhgtJ۾<+M\" :cz:!/L0<70\ok=6PwSw:ĥ1Eq~;&M<{W r[_[Ǝ4Ed@/3AI VldhV^Zhb@;ʗ8d:>:tXtˊ0՟I4T 5d|2`mbE9VJ/.U + cT^X+e&z[3f\-1E]g%5KsIRf$**YM"qh"Y:3(4W]*u1M;c^Rm#{qyB 77 R3PʻXPMGcYN5epY OI @ Q,e%TEV@,uel߂qCK'V=Dif8h@tKdK 2jHKs%dF PA'!,e^q%-)$@WL w&S(*<@˱O|i -RY4c.9G;K2 33||…Ig.\2C3WQ3Og9 )hAcuzAPo-=y*E8b2zBUk忂Bw/VͫWЖmfE-,f9-Cs՛-Ԏ\>ex _y>AuS"뻪l(u6i+n),!'BE-/s*>= "LT{$efJ\H]' rBӌ3P3% ˮ2s!#1ΝmsH%Tzjw G[vGo#4꾱q&4Zzp-6GJkEE $b>2v|'c0 *(k@[(GLHPѩ=`GY|zц Bo{Ahla7"b ߺT)-{Q[z"QtRT7;KoM5]: 9A⒒h)F$% %#&8G %r# uݣ0y(5'LB|a+ uu/MV" nJx̽[?l3T*ßM Y]y-` ~9;߯jU}Zr;s|Y_\鷇:e"'.sH^SD5KΓq?"wcTUtMUpVzʧ\⾦uj$ y&dSj'p W#zP \L'>mSUSn -r Z/ɩwz - }w T[x햣wa!DClJ2sCfj>yD鄾ǻxi=#o^bX+76etEJH`Z]z]YKua:F*/= wLrܥT.9!|P4ELU.9zʥ "-u'cn,6wN$#GCj.,N9 {,F,l*OiEIN 1E9RaT!6Z* )=po([x<ASH=B"Of(h=]aoh Z?h9i&efRJ(NMg$+S t炖IZ2c)YI3[M'þGV~х NᡡlG,&iŠ.֏}`bqAdKV#%ŁLf٧crń<}On_|Tͮvo{6\=HB^zg~Chx[5\x%jieD2 *dt< c(&րԥWW%H9ZnBeы|Io,lX} ޠOrX3xXӊqt `ňE^Tu8c)xD> Ne"ZVJfGg+|P>DP s=o;{j!ʚGR*c%fĉ(~nA62G-u~o6uLj(iP:[EoVK˯W~ @S(*]Dpјsqw]~DZw;^ ^J9LA2][s7+, K*=Vdˮ8D8IcNIUPu@am RI`&JebSĞ2g'}^}홺CRrJ4H_=qGt:\t,}3ꌳPӀIuNJo/Myo_n?WOo1pnnxzN?|7o.yoF۟~+a߁ ꕔo|1&JC Չʹ6SS'A+tl/Cw70ƀnzuVr5uV>Cʲq5*" e +#Kb$FCb7ZHcK4B0b'V 뷅Q* eV%K=f\I'dPjBa&c-0Z0Idϓa?On-QRtk/a6JRfSoD#%|P?! R ^|58v֝Ǐa(~t(Tj>䜙VQH]I) .%UQl@2ݛ%%Z- 梊QJXʹk%"P(*Jc5'k j)Bl7-mV=pC]0·L;Nd F+wET<̻rOol*3ғn/s9zogQNJlzSV1C|X{`R5AK2F9 a ?_y;D~3z!~JπO;v2a|5|Η9J?^]i+c'ۃ)hdƙ ;ٍ㸦QTeajRPuP.(* t}4,4hJ<ݔV-80~. MYFE D"L:*S>F -Eo4E-< +Y -#0^Xk/$s"_CkS4ӂy,#&2(0!Z.̢׻Sj{Y#ԭzU\0%/geI7x`#YFW=) u N-W#Ake3$](0W%Q=E<*>{5^yr{Fy);̥~kqqYzlHNrƩ&֪#e].fxD^!8(H^eQtXTem&'WlڜFsmrW^LB DO}fm&۪YXU/Yd(QB!R&C#HI{qxibfG{`_=j&#.;b@ggNG<$dheSɴnl[ T-m8(&7d.vӣII6H[Όn; I ^к0I/ GaPM{s۬[$ydfJWu|d!|*ea  pڧsB%6i$źx?\YgϏ>eh ĺ$<Ů*& q9~ƥi^QZ_{i%5R=G,T3S-1obՑ 5Uj·Bh `%+pB%r5af[ y&61O7*_İ'Ҵ3 2_9p8o3t6|"TG+03vw?VH.{<# 0 08F\8(cņ>`vki}<ڰ(Ȓf3[)xt0'`o-jp],k۽p%{frI뾬#tá$h9J. 3Pb u_Ղ+@班B&fvijiu**':- 0VR<.U"S_(pq]2ZY[(Tǝ^|aQSU!챐:HY@U2 LzOŪrRkĒgZ(9i%OA=G󃐊ї@Z]}%^(q=Q_Rh/0*Y_t.a&v8=>.}MIf7c|9v%VGnO7: vMWSMn>R-pԅk$&:8yqB?U=KZD]PF* wបѽIʽMqC+s;Z6E-2T0vugoIٲD?h@:"Z^7X͑φ>\L7DS.7rխU&M"c7mHW>.\r-"Av i_)WB{b|]>"-Cd ׊L) TXS Jx1~~wC[G7-Ӣ#=?&'vv1%)8ƼOH:JvuNPq%#]DGLX7%6gmc$! W'>Yو0NMƩMȥs#?㻓AwٗOnxuqgh8>Έ~.=Rԗ!`+cVPʃ,B2h+7N&@ȅ_φA䒣AqЌOubmWFFjJ/ f#~Dz1Bw6k[WH''hb^v2.(d`e4z˹OoN2yj 5{QmE mvd6_>yRFٻ=-*+AarwթG& a}tW:LpֿRG#Ebf/ܚu#)a izcL=G2+_; ESjX^5Z|%KWS&>"}D}%4f|ϳ#GA5ޤNӸG eLCD-nb(y:q^?1o՗‹ ZRzF#|Zi1h|p U|`?! nnPʏ> "`ێNvz;d[B~pRmZ9~Ncu~={9)Y\ͩʢH֢N(QCpxW؝PLG *44@0Ť>cT@7SZMԎ%*Z Z(L$N+pV+f (Cq[nipZsh"V9fQf!r&$T,$Q)(i`8z6XἯzᛟW<^?Χ7@϶^͟]m:k ?kt6,Rqٻχ&UooB/Kttaے!zt.8!~JπO;v2a|5|Η9X?^]iVi4`\|LbSBx=kTQj|8V盋-B5NAλ)x DHb魳( B0o6nR-\Ha*y;GmPG.=OymSl{^mxuTK9%l4\I,5#`ɔnBmiX-Owg?mM<ĺBy+t]D$B4ŢRK- CQF. P4 +Ɉ2k *h,w f!8KFcK$eŤ"у|GciZ4^aVY`jMzϕ^hu;uZe) fL|#eiO72{Rv͛UO9y>RB(KmJ.RCtArag h&0Pep-i9ꐣH=OjgR Q/TВh( džEVE͉AxT py:Dm&<'UtCVS9a_94™a!BA7Izҏtf:B-^'ڼ'Gh%,J{0h(f4Ϧ(4Ir&0+퉑[.L3Z](j@BѴףfjP 4auVun!a*#ubF-kƺsFyZAis;*5J FP&1-hǶ*Rrn&"L7bTinCN!h"ݣQ1tYtQKfŒgJQ&fh0`A#&d*xwmHnoW6_*V| gl'_%J"%@)TEl<o Xi0:CZ5XVٍUS s֚Y=soDA Q(g85VfǥuHj`3Ԃi8Iڰ\p"i%rv2C`E6`RKr߉5 @So؉Ԃun48R@RBeQF#jwbʑ? 
މaV; ޥO1}$V=Ss oL0q] o2T(hb}[0)3oT b@3L0E ڇj$˙VZ=e0F3i+i֗019Ps]qr>a n 6<@yՎH y >@E6GC:B`m{˦oNSJK'M̂3lXh, OM&ۯ]rdQ/2Z|7<=LHI<7XO>PIQe]sH{zg]tZn1)w=L۹ۡoj0N(r2X(%כv?MmL*}1]6~k9t~P:Dabvj/'5;(.$(@j}Bޘ 4ޫ6 V>Y%zHVVjg[ZZВJ^$ -UI"g[ww_܊{T[4궢y<ՌIKøiЍF_{LUY U>nQ-0Qx0J'W9|ycb;1:GE_]L"-+0I~߹Aߝ?ZP3)k5vv,Q~ ~nV~l>5G`Jц|E'☥cׂU_`sL;'+K{}c!=Ū`g7rHBsm%S ~]ݪ_N HtZ#ju;njŅ>]Hw.d-Õ}eO0VU8/{˞r&hBTʖ3쨴׊,RzaMB%;Own-W֎^fK6|ܹ#a|fS+T"OƛvⵀoUPj8fL9aGA'* T D UHQY{Z -qxM'x}9=H #0f\ZdLq38Ǝ9Z5x"ַLsy L,==)' kT dyN&(@'H7}kpz_=۟&ljDotE\"i QK%,tο#(TcB\ ~:ITޥmk4p6 2_fvq3OC^y'wRȗ+>w*|GPU7A*Dgi6bza֯7|-.tlt aIS8HԸ&CT"7{+[F"m`]my 9Cq9 7 nxǠ!3gfzCƔRqPELzJQ?_+V9;䮤*W8ܘ+mnj83IƆ< ֏[Ԕg&ùRJ*q8GS}ԲC@X@t`ɌHr] ˵oںsqz{@a˔y%AI|F ,,}1W-@1G dX^& C XܘJoNsǪfL.ٝeq f<TkI)>ޖ_iQ5Zgo$+0@|ݭMF<+؍@#.!m4WfN=fpD Zxi40 jNRq4m0/ih4G*) ݕBO!b.(P--l 5&g㝎Xvcʋ 0mtsSS}M!8EG˪(e<#$⼢Iw3^ji|q],!JQ?%ď,jURf%BL+eUg3!fIȏx^j ԼO܏>p.er!Kf``oh3~;Ɏ-;W!A* GB=^C;^#,Ƭ3mtb`>xG??e}!E`i)5%^=|\k/MXSp$/|NdWx تAɦȸJI'u PV*?>3PȺ#d_4C $;0Bui dEg AEiC(*_~p/4J|tN7.eŨZ"CRz/!xUxJI d XY;T xȩŸOԇݾ[#rZmcqB4A(.؃V YҊilS#dttso0¹1W;ޙXOFr41{[(plI 4y^u8p)a uD'Bo dʀY伒 *5VQgk s9%Zk5Ef&3ǹF Wrs5wp "W)_^_lDcٍ\?fOZѡ߹_~v #ZGw9ѝ`hSsC(<fȥv⼯nF ^#$i:4} Dz'lكٻFrWyICky"Mp 8 zϑ$~EmKm~Hn_"YU !]#M\1.i}.ܴ95$cbZ\L2ΓqqS"h\p*{1Y}/O쥥52R`ŲfyeS )3PGB~ps.<σ!6V<ƒ0fπ93KιR QpF 4F+3f–z?CeYUgcG͖ͳ3wwN )f̫ NjLVt*Oy݌C0wpqpda8HkaW 9öFH)0 x-vY 5Fœƚd< i\J/[da۷@OK,Pl@'w,rpFaZR^OFzx>ꑜ{Y}|qOꌸg+#Fȴq}>n2eik9u7Mb Qf/7Mp@.so^sRL9AN1~(HVd+aZ'gۢ/zW$n}<+˫ j \G=r۝LsPTl8m@Cm̼}'qY]eҥuu`Zn_~!}ʐ*iNܕֱ2pw7axEPO,ug>cxQ/:K3m8oXoSYA4^h;pzP% s0aodwb\: 0K_~GF X 9 F̔-Zh`-*BSI-QNGN|ڑ^/Zq>u:EH[MkO7Fqz2%jHM ) &`:%A`‚d &bP3I %Ô'K tٓ뢗 8RL`s9_i#O%͇(hNeH ZM#|ȣd=IZ;qo2YARtbegBYvilUw2[Oؙ!]]#rDl]p pn @gd="0x@!J @dXO< >ðe+^2%Y2XάPӸ21%d!΢I2 C5U=z3Њ Z:2@lhvr%r D!YAq<( za)eP+ΘarRWTr/enxy@f&/Tsz,cOj{M>!/otdA1̖ZId<Y3kr.d]g%;h` '(E ,gief \=sA{.Dq O/UnPZz& ?f$uI5Jﺹ֡A6Gq4i9é5#Hk-4j%>2KMBd6"=Ww'M6f)4(DcBȳM.2PjWh;Prġm0gXWF'N9kj4S5.&-(\j7dHLY%MXtބձL^J[] ^KPgLA+CV~)7Bs$,Ljܠz(wRfMg9 0D\$a$ܟ(ؔr1~L}Z_Wןv߶wǰ^p?^dG`%zK(^M5?^Ϳncco]%}j}??|<|/r7NJ~1oK`hxE%3s$-N:Rfȧ}nL%I=wCO4}wi!z* f2ҕZ:8<"F%A5I%mX`j^y)󡵸u\󪃷+%܊?zyj9f{OG ZJ3/]ן>$nG],|b[z_P B׺n-Hf fi!odr1IYWRB.LւiGs*R8us}XqnՁ/Eր܊KܽT:7=Ck-T3@ "9焁Yɭp v@9^ؔHE~@iČ*)F2ḳ̒ީE %XF[d2aГ`~5V^Zd(8L^YW'yTWzTW 28W\L4E*qQ^-!juQ&e7o?͏5%lr5 փVQK?kFQwn-4uCDjDXBC~~S=iiqѨ8R;mwAt`ck?.׷7_nK7(}V㊭ߖ7% sok|]^{VwkvjTrDΥ0E1!Ff'RXB`OI~ӹI1Z;;?@eή&Ҡ^V+5,DOJ`V i"gWգPPm:XUvUjtsM\jwКVR|I4Hlq:~[]a1FyCf&J>F>^<cr qZ%yv_k{"̭?d7 i| po'sJǫtZLUz?)8Vɓa"R/k v3'l1U>Ӛ<1dcQLԌBhB:eH+̢|`XΖ5w >9ygt]Vx\Km gjbu+ݼcHmSq#Hsm%70j^e;.HցeOI+U:D/5fqBJ<*R#A*\-d ?CC08儦K iOܓ)VJ+^N|Vqy 7֚kW^geG!34|)B:")X҇RɈZ^~&#G^sTHF+#w߃ӐZ3GBf9g:((X) ofu ߾5i\!@6*;Z MTY46:K<TsiYִ| K [/H95A(~+1#^aWB rW d*}wr;P7G/i 3&+\<>+KqU{Z.֜SOo0VgTBo~tgz;ATL;C#qamgD1Cvin_醒6LL5D"g&Qf󨽏U#0k]clT>LF 65K 8LXPUЦ**(n.br޲#$Tv_ֺPU/2?WܣhTL F lۀ\Fd, Bfd.tn`<:ThZV`, 4=zςVOztR*J ODQ^cS!2pV DΞӪK~ȸc QB%5nD IjS1nA:Ps660=Mi8UrX>qCT ƉLaeV#1/H5@-'LK$Vp/S4ԥL_#6BFR8BtR1a$!ͭjc1c$DuQ'ө-wRW)wҝujInIVʺ];TI[j3xF 0Sy*5 %:i2+7x^B_&3YG,nMdk3y~xwR>/>2Z˒k.n@a ,!8SX#҅-/͟y-g;`/xI^[.UP,^e5GNjs¾3'$_6V :7Yko-7"--J}+wg 9Ui:OS5M:TutRXNʖoU`4G1ׅA~>'rXЃ{.hvWFA@2 =֠=9e)%#֐ei+Bkh%BKd1Uiז-_Fm8Kڢ-HGl5CaQ.8Y =}rk̷'kj&rr]~ƈW?ų/<@g2ԙAk_elykp}?ov}|47Q_F-zƭE^n[-Dmsq/VwΦULJaIZ~M,+~sq/4QI߇9?v~Ȍ%'n(9t{YXp\,kF2ɏꔑ;{}ٸhLܥkrB\!ڟ,\幤ΰ4&6[ȸ&J6G;yյƵ9O-ιhZŅǥcVA` HA{tA T?ҽ!$ȓr{%ԖzP]Tq A.V(唂ŀ#F KDS9WUZ8!&TAQ9"|iٽPtcFgª@RlOf3;xL t⤨x:ՠ4>vtmymbN3u}&X2.oO:*iyvҧNվ^SV5 !\DkTp®vӈAŠv; xh[-ݚW.dJ*h7IŠv;N4նvRR5!!\DkɔRvhTs{4=Fg]ITCy?׵uL]܉T:Nhn&dvAYXmg^C~|~,J4ٓoloۢ6MɵV0aPȉa>,Gc`w6鶕DؔUp'N8}Z|i=>XvăbR_(>_N#ARsb :,N@eNŘ yVb2ؕLf_C plͰ <(RL36=SA)MVrfG0-% /[,T9t>}0?~Ʒ8t;ƒ5u7w!WvP%k"Uoì6 QjxMdJ3h3*Ƿ.h2>d{]>zLhjݧ no'Rt 
JA^漷ӣlӳw{˃PO {"}BRp'^ ѻ6 Kcw*Vt1`\͂}]9lA1k 2-j׾&"Z/e[{ -sj]{`}}~]t˃>d'`w>1G,M4u4«<{9):Q`B/!z3zQRxO78^.M]|yأ ʖ_/þ:1?ZBd @o?O4j׵+* FD6 u2Fx _}dd)} ;Mz]r kڒ]wBigɒh[Ν3>J0üW%Ou#ʉgui-ul0z}D w2#Lj`rd%ϵ%҂vWQy+_n"A][aŋE;u}Bg9ϵf#Βuzw{K}V_wץiku"rTAHt&@]:y=HtwӸ"UX ֽbaS;dck+1j6#XefӝsC|AaڶOv]:^YU\[~=wϡ0J/ Zv4\в<(sVuҚgښ9n_a%q%EUzݸNu◸T}hK,Rk߃&)jHùʒJ ڸoZ_#*&s*cn3UF[EDl լvܓOfk\P*w 6B؂( mu("dctuӀfn;1`r@-RejUi~0; Ő3;lm:f=jqbHa]`x]<9}`T1d6vf8E5.dh7w{qdw HqF^l,<*"%zȂ[$5/VlM &)Ǹ(QP(S +Ťwi44Hz_?|ʽx2^.S"~JZkC1;) (/9*:Kem^0+>I=fosT w< ºz,/Y+Q7,*Q ^Sh.eZ:']g:?Sw{,v~r? jvvB]©_~E\F^<KΤYbC@TN!! (bRq#qU A`KںF].k}+0d">тc9`hAJ0Zv13I]RWomkyq/|;|4םshJ0.W'=|~Rz`:]:;oDDHQ7ۓQMgK F+Ε1sDO>m)*uGCrNԄŅo]eP`BB֐m  R|yס`1 ;Zx%5u>k'Y?w>*Ihë,#"9B-^ysoZ37!O?l I>/|^$(g!^sFhl !";͕Q& b1,c-Ъusv:8JJ VmyɻsNvTpgZ+BWJSK8n4O.)D?^](AG͔+. 8wmX1w'&X%*[bn_ D3H\"7m3 pk[}Rp-I -.!Th \n8Y$\8{'K<+n޼Sy:`|y 2E2=@"M,#Ԃa2} I&QDcDv.KC~RN??QuNwaz5;?I߁kss?SitD,:7s(|ǁ8Z!:(K 9ؿi` ba&!U:Q[-p<0vsז}VX=.܄!IW/e<{{9)E^k e&?Y=wѺ/<^(EZEiCwA?ͤdM\o kr}(,h~oCt(h(!3B)Wp-qw o>M&+a72Pߦ@ d KZy[> :dPmOr7)~(QZT;1d5n\^uJ-V;sda^ |{o[hjY"i/{ A.2NGOقGѾ{xxYMpedcpYq9dΙ|o/Ltw̤-Nm{sm#^ FweQ[Ngm9E[JJB/>/O`_)K݇ŭN}͛hr/Kn0KjM]POWUpH}hk*n|cYptm {I링aE*b+lDxFqS$BȘ7xҔɬ hNsX .Ե~0X?j9H GM081Yӭ81#Yd՞yQMJ7@XwN@ <+>MC̴ΈS0j1[*PKb;oQ_GF,̪$jK ҄(#@'(&1K`m=9 ~UkIvl?t-ߍR2d\$4c#ic#g=6*M^[+ ASQHS;;r!,/zcwUwgǃ}6xzz˳sd4u6Q\x$Rѣp(ϫ8 '2/ď.0e(.,m+%[q*Բ/7uː߷"?xS5zL Ї} YQ(f0!FILڳ3mMȆc UR:l-+h|=3LgpْNqO)'LkxR{N tsfRS`J% oAJDs*OFog;p83I׫",UjVȴvT: .`aӥ`Ys8s~ËlqcF0МY= {USsv.im^l7 fuOr9NUI[_NIO ^t,G^۫1 9 _ϗ0lƳ$N`ػƍ$Wۣ/|@0=,nd6_60dsly%9 )YjId=clc[YT^.bh`h*@DnRlZFm x1 (;vߵzo#HKC3}%~HZ\ŷTkBp:%2L`eL.Uesa,VRRX~xrM}a}A;%<|S 8kDB@)K>\Ώ*XƋ%BlR?]E!1i0!`.?Ji+_Nb/$iՈr!IX`o{\UAp2MBXx~tqh2ךp@JjlZ ȞRPqUАL6U0;zAmf)/M$ΐ[nA|4SwЎg/!7ҰcRZR,"\]1CNhnK-.}Ĩ1'OlW|x}1ʨQ P5W FDד[Bk]Í/"A"S_rBK` C9g`5Çr(GXiiC r('ʫ# [rdB93ty+iukAMY{{A=cD.`HeƖ`XO\8HcjpVV34.olw\ZH/%`ଯlNtPhb\h^Y@K^ADڕugƎ$+^Qo#mHaTiJ3yʠ%$8pt|Zy9{DGyzާuQhn[fV֕(_N;(>L#)1~WWɻp\N?>ۇհtWE2;"YXvOylwTlp,_D[Sm(x&F;k-LFJGH@-VR ξ{s㸢9vv{qXpp/a僣XTcYg^>_|A9Ǝji\0y~sV$蕌h ?dEsk@:allL`f-f6Q~s1%&k PsBɎ‹C,#uk5 zJ? x,!ǎhr2~@),O3Oƻ\Dds^zpr -щ}Fv$)fl01Pu!!_)>Q"iىb>|ZB7QMY,O XL T!XV)HRbCw`B2MD*ˬG>e[[g1ɸ }XUΊv#y?S`Zl| 7R *avcuAص`S)7%D}O.F\6 eZK;42W ѫ9R $̋E,UEPXX.nvv/2+PG,YŐ,ҽn4P> DNaG /:7f)N"a:O,aFYrO_;̼*UpRgXV- ^<㕰P>^Pg&SM= 1'd;Ѭ2Ãf<"F{MȂӞ@:I3Ok_r@^_}{N58u+$Q˗8Ms|us8lm ÜP8?B8+0/^Wȅ?MMrR2M޻\:52y\Φ\$ߟMoݱ;6uǦb:O()U52Υ 4G[+4kUS4#|7%{[fObt=d3|&wugsyۋ@=мQ/pĸ !vw3: xzwNWm.)0 k?/j65WG_,K/>Ň!~spǜZ^rYĭ nq,[߂(#2-`i&'ij-@܄,pj8"Lm![U`_ `EF2i $*)SBب46 8ڂSA4ӑ UaQiyY1sM ÔUdLӄ%Z&1<9)eвpxLV6W9dHl2W LTh\9`*7l4K@^DC^VB^ WZFo]Cv6FrqCus$Z{@|]>oݐZvS|=ySUW#\;>s{\m^f30xn.SM.3K?mފ7;h&ԕl<#eߺJ {naT|Ԫ0vp#p\`MjkK_tS&UHfҽ⦌Jn.t|CvBnŵ.?iPo|$c?#AzlUXbUw3Қސ~|2i1k}L}~ SnF]<ȘFSvߓnP-̚gP~af42;NhitE}+.V;T_=&%ES!ԃ 1mYAC'SK#M w]s>4ܐe5&n,;ppQ048Hy[-*.4 Bxs)qf kNphDqC zCv?vPOp$oDDEk9HG9XbIJepb ZqwqJ0L5;:Ho-)DGP-g8]r,҅L -3@4PNd(%e*qB;>FD yX\moKMS"0ʛ^Tnx7!?fΌe͘U˝+BuB15Drj92jpZUl@'FAo/lDk/ull@)AR-9^+LTrz 8cňIE!Z'P(bQ!2ɌF))r}ꋵ#@,-ZG~)h3Y1j @3K3N(cC[n >nrh4*~tNlo:Db6V_wjoz ?wx$2HLÆsv|"Igo:@sM>G/D`bd,}EH 9⛝W3uV_}uuL@fo^0 (c\e`;y t=ֱֱֱgfy&2iB- *ZL(ZJNי EZZs)ȤE>(]χ̆O&R CBŲ(Ub.]sJ_V/~#A_oXM%zSg"{oiP\O]:. vz` zU+s |qv:w}X]:on9nI).[/|Ω7S+/]?fh;ֵ׷ B<կ ~׍@!D92YRB 3l6`FtN)MYr.(S(P9:4O)"( ]dJ ;` [ / !F.` 0 e9 <$SPJ٫08V)ΰToxS+Xdvdndno]Mr쵻]0v{lc{}< vH`K@^Cf`UhF%a"6لVN^#kvAb 琴nnTK_Fd:uT*6~ R/dTmn@!D}Vv\ul3]3)+@Yd h-{ 0 9tcڕuAtkpEZNF$OREdbM!! 
var/home/core/zuul-output/logs/kubelet.log
Feb 27 08:28:20 crc systemd[1]: Starting Kubernetes Kubelet...
Feb 27 08:28:20 crc restorecon[4695]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 
08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc 
restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 
27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 
crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 
crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:20 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 
08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 27 08:28:21 crc 
restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 
08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 
08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc 
restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Feb 27 08:28:21 crc restorecon[4695]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Feb 27 08:28:22 crc kubenswrapper[4906]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 27 08:28:22 crc kubenswrapper[4906]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Feb 27 08:28:22 crc kubenswrapper[4906]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 27 08:28:22 crc kubenswrapper[4906]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
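The restorecon entries above all use SELinux contexts of the form system_u:object_r:container_file_t:s0:c7,c13, i.e. user:role:type followed by an MLS/MCS level (s0 plus optional category pairs such as c7,c13, which keep one pod's files separate from another's). A minimal illustrative Python sketch, not taken from this log, for splitting such a context into its fields:

    def parse_selinux_context(label: str) -> dict:
        # Format: user:role:type:level, where the level is s0 optionally
        # followed by MCS categories such as :c7,c13.
        user, role, type_, *level = label.split(":")
        return {
            "user": user,              # e.g. system_u
            "role": role,              # e.g. object_r
            "type": type_,             # e.g. container_file_t
            "level": ":".join(level),  # e.g. s0:c7,c13
        }

    print(parse_selinux_context("system_u:object_r:container_file_t:s0:c7,c13"))
    # {'user': 'system_u', 'role': 'object_r', 'type': 'container_file_t', 'level': 's0:c7,c13'}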
Feb 27 08:28:22 crc kubenswrapper[4906]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Feb 27 08:28:22 crc kubenswrapper[4906]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.061874 4906 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066746 4906 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066780 4906 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066790 4906 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066798 4906 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066805 4906 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066813 4906 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066822 4906 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066831 4906 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
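Several of the kubenswrapper warnings above note that flags such as --container-runtime-endpoint, --system-reserved and --register-with-taints are deprecated on the command line and should instead be set through the file passed via --config (/etc/kubernetes/kubelet.conf in the FLAG dump further down). A short illustrative Python sketch, assuming this log is saved as kubelet.log, that lists exactly which flags the kubelet reported as deprecated:

    import re

    # Collect flags reported in entries like
    # "Flag --system-reserved has been deprecated, ...".
    pattern = re.compile(r"Flag (--[\w-]+) has been deprecated")
    deprecated = set()
    with open("kubelet.log") as f:   # assumed path to this log
        for line in f:
            deprecated.update(pattern.findall(line))
    print(sorted(deprecated))
    # e.g. ['--container-runtime-endpoint', '--minimum-container-ttl-duration', ...]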
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066839 4906 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066845 4906 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066851 4906 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066857 4906 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066862 4906 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066874 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066911 4906 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066922 4906 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066929 4906 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066935 4906 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066941 4906 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066946 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066952 4906 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066957 4906 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066962 4906 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066967 4906 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066972 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066978 4906 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066983 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066988 4906 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066993 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.066998 4906 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067004 4906 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067009 4906 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067014 4906 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067019 4906 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup 
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067024 4906 feature_gate.go:330] unrecognized feature gate: Example Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067030 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067035 4906 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067041 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067046 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067051 4906 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067059 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067064 4906 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067069 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067075 4906 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067082 4906 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067087 4906 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067093 4906 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067098 4906 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067104 4906 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067109 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067114 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067119 4906 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067124 4906 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067129 4906 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067135 4906 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067139 4906 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067147 4906 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
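The long runs of "unrecognized feature gate" warnings are expected here: the gate names (GatewayAPI, NewOLM, PlatformOperators and so on) appear to be OpenShift-level feature gates that the kubelet's own feature-gate registry does not define, and the same list is re-emitted each time the gate set is applied, so the number of distinct names is much smaller than the number of warning lines. An illustrative Python sketch, again assuming a kubelet.log path, that reduces the warnings to a sorted set of names:

    import re

    # Deduplicate "unrecognized feature gate: <Name>" warnings.
    gate_re = re.compile(r"unrecognized feature gate: (\S+)")
    gates = set()
    with open("kubelet.log") as f:   # assumed path to this log
        for line in f:
            gates.update(gate_re.findall(line))
    print(len(gates))
    print(sorted(gates)[:5])
    # e.g. ['AWSClusterHostedDNS', 'AWSEFSDriverVolumeMetrics', 'AdditionalRoutingCapabilities', ...]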
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067153 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067160 4906 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067166 4906 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067175 4906 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067181 4906 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067186 4906 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067191 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067197 4906 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067202 4906 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067207 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067212 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067217 4906 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067223 4906 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.067228 4906 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.069952 4906 flags.go:64] FLAG: --address="0.0.0.0" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.069974 4906 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.069990 4906 flags.go:64] FLAG: --anonymous-auth="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070008 4906 flags.go:64] FLAG: --application-metrics-count-limit="100" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070019 4906 flags.go:64] FLAG: --authentication-token-webhook="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070031 4906 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070042 4906 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070058 4906 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070065 4906 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070072 4906 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070078 4906 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070085 4906 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 
08:28:22.070091 4906 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070097 4906 flags.go:64] FLAG: --cgroup-root="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070103 4906 flags.go:64] FLAG: --cgroups-per-qos="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070108 4906 flags.go:64] FLAG: --client-ca-file="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070114 4906 flags.go:64] FLAG: --cloud-config="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070120 4906 flags.go:64] FLAG: --cloud-provider="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070126 4906 flags.go:64] FLAG: --cluster-dns="[]" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070135 4906 flags.go:64] FLAG: --cluster-domain="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070142 4906 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070148 4906 flags.go:64] FLAG: --config-dir="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070154 4906 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070160 4906 flags.go:64] FLAG: --container-log-max-files="5" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070168 4906 flags.go:64] FLAG: --container-log-max-size="10Mi" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070174 4906 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070180 4906 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070187 4906 flags.go:64] FLAG: --containerd-namespace="k8s.io" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070193 4906 flags.go:64] FLAG: --contention-profiling="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070200 4906 flags.go:64] FLAG: --cpu-cfs-quota="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070207 4906 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070213 4906 flags.go:64] FLAG: --cpu-manager-policy="none" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070219 4906 flags.go:64] FLAG: --cpu-manager-policy-options="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070226 4906 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070232 4906 flags.go:64] FLAG: --enable-controller-attach-detach="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070238 4906 flags.go:64] FLAG: --enable-debugging-handlers="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070244 4906 flags.go:64] FLAG: --enable-load-reader="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070250 4906 flags.go:64] FLAG: --enable-server="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070256 4906 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070263 4906 flags.go:64] FLAG: --event-burst="100" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070270 4906 flags.go:64] FLAG: --event-qps="50" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070276 4906 flags.go:64] FLAG: --event-storage-age-limit="default=0" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070282 4906 flags.go:64] FLAG: 
--event-storage-event-limit="default=0" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070288 4906 flags.go:64] FLAG: --eviction-hard="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070296 4906 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070302 4906 flags.go:64] FLAG: --eviction-minimum-reclaim="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070308 4906 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070314 4906 flags.go:64] FLAG: --eviction-soft="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070320 4906 flags.go:64] FLAG: --eviction-soft-grace-period="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070326 4906 flags.go:64] FLAG: --exit-on-lock-contention="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070332 4906 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070338 4906 flags.go:64] FLAG: --experimental-mounter-path="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070344 4906 flags.go:64] FLAG: --fail-cgroupv1="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070349 4906 flags.go:64] FLAG: --fail-swap-on="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070355 4906 flags.go:64] FLAG: --feature-gates="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070363 4906 flags.go:64] FLAG: --file-check-frequency="20s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070369 4906 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070375 4906 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070381 4906 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070387 4906 flags.go:64] FLAG: --healthz-port="10248" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070394 4906 flags.go:64] FLAG: --help="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070402 4906 flags.go:64] FLAG: --hostname-override="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070408 4906 flags.go:64] FLAG: --housekeeping-interval="10s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070414 4906 flags.go:64] FLAG: --http-check-frequency="20s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070420 4906 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070426 4906 flags.go:64] FLAG: --image-credential-provider-config="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070432 4906 flags.go:64] FLAG: --image-gc-high-threshold="85" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070438 4906 flags.go:64] FLAG: --image-gc-low-threshold="80" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070444 4906 flags.go:64] FLAG: --image-service-endpoint="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070450 4906 flags.go:64] FLAG: --kernel-memcg-notification="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070456 4906 flags.go:64] FLAG: --kube-api-burst="100" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070462 4906 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070468 4906 flags.go:64] 
FLAG: --kube-api-qps="50" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070474 4906 flags.go:64] FLAG: --kube-reserved="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070480 4906 flags.go:64] FLAG: --kube-reserved-cgroup="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070486 4906 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070492 4906 flags.go:64] FLAG: --kubelet-cgroups="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070499 4906 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070505 4906 flags.go:64] FLAG: --lock-file="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070511 4906 flags.go:64] FLAG: --log-cadvisor-usage="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070517 4906 flags.go:64] FLAG: --log-flush-frequency="5s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070523 4906 flags.go:64] FLAG: --log-json-info-buffer-size="0" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070533 4906 flags.go:64] FLAG: --log-json-split-stream="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070539 4906 flags.go:64] FLAG: --log-text-info-buffer-size="0" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070546 4906 flags.go:64] FLAG: --log-text-split-stream="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070551 4906 flags.go:64] FLAG: --logging-format="text" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070557 4906 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070565 4906 flags.go:64] FLAG: --make-iptables-util-chains="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070570 4906 flags.go:64] FLAG: --manifest-url="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070576 4906 flags.go:64] FLAG: --manifest-url-header="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070584 4906 flags.go:64] FLAG: --max-housekeeping-interval="15s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070590 4906 flags.go:64] FLAG: --max-open-files="1000000" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070598 4906 flags.go:64] FLAG: --max-pods="110" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070605 4906 flags.go:64] FLAG: --maximum-dead-containers="-1" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070611 4906 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070617 4906 flags.go:64] FLAG: --memory-manager-policy="None" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070623 4906 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070629 4906 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070635 4906 flags.go:64] FLAG: --node-ip="192.168.126.11" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070641 4906 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070654 4906 flags.go:64] FLAG: --node-status-max-images="50" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070660 4906 flags.go:64] FLAG: --node-status-update-frequency="10s" Feb 27 08:28:22 crc 
kubenswrapper[4906]: I0227 08:28:22.070667 4906 flags.go:64] FLAG: --oom-score-adj="-999" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070673 4906 flags.go:64] FLAG: --pod-cidr="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070679 4906 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070689 4906 flags.go:64] FLAG: --pod-manifest-path="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070696 4906 flags.go:64] FLAG: --pod-max-pids="-1" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070702 4906 flags.go:64] FLAG: --pods-per-core="0" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070709 4906 flags.go:64] FLAG: --port="10250" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070715 4906 flags.go:64] FLAG: --protect-kernel-defaults="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070721 4906 flags.go:64] FLAG: --provider-id="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070727 4906 flags.go:64] FLAG: --qos-reserved="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070734 4906 flags.go:64] FLAG: --read-only-port="10255" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070747 4906 flags.go:64] FLAG: --register-node="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070754 4906 flags.go:64] FLAG: --register-schedulable="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070761 4906 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070772 4906 flags.go:64] FLAG: --registry-burst="10" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070780 4906 flags.go:64] FLAG: --registry-qps="5" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070786 4906 flags.go:64] FLAG: --reserved-cpus="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070792 4906 flags.go:64] FLAG: --reserved-memory="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070800 4906 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070806 4906 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070812 4906 flags.go:64] FLAG: --rotate-certificates="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070819 4906 flags.go:64] FLAG: --rotate-server-certificates="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070825 4906 flags.go:64] FLAG: --runonce="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070831 4906 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070839 4906 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070845 4906 flags.go:64] FLAG: --seccomp-default="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070852 4906 flags.go:64] FLAG: --serialize-image-pulls="true" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070858 4906 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070865 4906 flags.go:64] FLAG: --storage-driver-db="cadvisor" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070873 4906 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Feb 27 
08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070914 4906 flags.go:64] FLAG: --storage-driver-password="root" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070959 4906 flags.go:64] FLAG: --storage-driver-secure="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070965 4906 flags.go:64] FLAG: --storage-driver-table="stats" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070971 4906 flags.go:64] FLAG: --storage-driver-user="root" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070977 4906 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070984 4906 flags.go:64] FLAG: --sync-frequency="1m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070990 4906 flags.go:64] FLAG: --system-cgroups="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.070997 4906 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071007 4906 flags.go:64] FLAG: --system-reserved-cgroup="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071014 4906 flags.go:64] FLAG: --tls-cert-file="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071022 4906 flags.go:64] FLAG: --tls-cipher-suites="[]" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071051 4906 flags.go:64] FLAG: --tls-min-version="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071061 4906 flags.go:64] FLAG: --tls-private-key-file="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071068 4906 flags.go:64] FLAG: --topology-manager-policy="none" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071075 4906 flags.go:64] FLAG: --topology-manager-policy-options="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071082 4906 flags.go:64] FLAG: --topology-manager-scope="container" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071089 4906 flags.go:64] FLAG: --v="2" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071111 4906 flags.go:64] FLAG: --version="false" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071121 4906 flags.go:64] FLAG: --vmodule="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071129 4906 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071137 4906 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071317 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071326 4906 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071332 4906 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071338 4906 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071344 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071350 4906 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071356 4906 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071361 4906 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 27 08:28:22 crc 
kubenswrapper[4906]: W0227 08:28:22.071366 4906 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071371 4906 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071376 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071382 4906 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071387 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071392 4906 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071397 4906 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071402 4906 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071408 4906 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071413 4906 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071420 4906 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071427 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071433 4906 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071438 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071444 4906 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071449 4906 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071455 4906 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071460 4906 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071465 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071472 4906 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
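The flags.go:64 entries above dump every command-line flag together with its effective value in the form FLAG: --name="value" (for example --node-ip="192.168.126.11" and --cgroup-driver="cgroupfs"). An illustrative Python sketch, assuming the same kubelet.log path, that turns that dump into a dictionary so two kubelet invocations can be compared:

    import re

    # Parse 'flags.go:64] FLAG: --name="value"' entries into a dict.
    flag_re = re.compile(r'FLAG: (--[\w-]+)="([^"]*)"')
    flags = {}
    with open("kubelet.log") as f:   # assumed path to this log
        for line in f:
            for name, value in flag_re.findall(line):
                flags[name] = value
    print(flags.get("--node-ip"))        # 192.168.126.11
    print(flags.get("--cgroup-driver"))  # cgroupfs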
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071478 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071484 4906 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071490 4906 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071495 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071502 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071508 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071514 4906 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071520 4906 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071527 4906 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071533 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071539 4906 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071544 4906 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071550 4906 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071555 4906 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071560 4906 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071565 4906 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071571 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071576 4906 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071581 4906 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071586 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071591 4906 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071597 4906 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071602 4906 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071607 4906 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071612 4906 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 27 08:28:22 crc 
kubenswrapper[4906]: W0227 08:28:22.071618 4906 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071623 4906 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071628 4906 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071633 4906 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071638 4906 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071643 4906 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071649 4906 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071654 4906 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071659 4906 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071664 4906 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071669 4906 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071674 4906 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071679 4906 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071684 4906 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071691 4906 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071702 4906 feature_gate.go:330] unrecognized feature gate: Example Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071708 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.071713 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.071730 4906 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.087464 4906 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.087505 4906 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087582 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087592 4906 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087600 4906 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087606 4906 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087612 4906 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087617 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087623 4906 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087629 4906 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087635 4906 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087640 4906 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087646 4906 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087653 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087659 4906 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087666 4906 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087672 4906 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087679 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087684 4906 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 27 08:28:22 
crc kubenswrapper[4906]: W0227 08:28:22.087690 4906 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087695 4906 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087701 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087706 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087711 4906 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087717 4906 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087722 4906 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087727 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087734 4906 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087739 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087745 4906 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087750 4906 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087756 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087762 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087767 4906 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087772 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087778 4906 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087783 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087788 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087794 4906 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087799 4906 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087804 4906 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087810 4906 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087815 4906 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087820 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087825 4906 feature_gate.go:330] unrecognized feature gate: 
BootcNodeManagement Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087831 4906 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087836 4906 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087842 4906 feature_gate.go:330] unrecognized feature gate: Example Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087847 4906 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087853 4906 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087859 4906 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087864 4906 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087869 4906 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087874 4906 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087900 4906 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087908 4906 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087914 4906 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087920 4906 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087925 4906 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087931 4906 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087938 4906 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087947 4906 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087954 4906 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087961 4906 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087968 4906 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087974 4906 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087980 4906 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087986 4906 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087992 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.087997 4906 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088003 4906 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088008 4906 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088014 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.088023 4906 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088193 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088207 4906 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088216 4906 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088224 4906 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088232 4906 feature_gate.go:330] unrecognized feature gate: PinnedImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088238 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088243 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088250 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088256 4906 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088261 4906 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088267 4906 feature_gate.go:330] unrecognized feature gate: OVNObservability Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088272 4906 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088279 4906 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088285 4906 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088292 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088297 4906 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088302 4906 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088308 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088313 4906 feature_gate.go:330] unrecognized feature gate: SignatureStores Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088318 4906 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088323 4906 feature_gate.go:330] unrecognized feature gate: InsightsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088328 4906 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088334 4906 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088339 4906 feature_gate.go:330] unrecognized feature gate: Example Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088344 4906 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088350 4906 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088358 4906 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088364 4906 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088370 4906 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088375 4906 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088380 4906 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088386 4906 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088391 4906 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088396 4906 feature_gate.go:330] unrecognized feature gate: GatewayAPI Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088401 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088406 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088412 4906 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088417 4906 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088422 4906 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088428 4906 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088435 4906 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088441 4906 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088446 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088451 4906 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088457 4906 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088463 4906 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088469 4906 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088475 4906 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088481 4906 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088488 4906 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088497 4906 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088506 4906 feature_gate.go:330] unrecognized feature gate: PlatformOperators Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088513 4906 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088521 4906 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088528 4906 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088534 4906 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088541 4906 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088548 4906 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088556 4906 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088562 4906 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088569 4906 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088577 4906 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088584 4906 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088591 4906 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088597 4906 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088604 4906 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088610 4906 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088615 4906 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088620 4906 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088625 4906 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.088631 4906 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.088642 4906 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.089572 4906 server.go:940] "Client rotation is on, will bootstrap in background" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.095700 4906 bootstrap.go:266] "Unhandled Error" 
err="part of the existing bootstrap client certificate in /var/lib/kubelet/kubeconfig is expired: 2026-02-24 05:52:08 +0000 UTC" logger="UnhandledError" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.102247 4906 bootstrap.go:101] "Use the bootstrap credentials to request a cert, and set kubeconfig to point to the certificate dir" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.102408 4906 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.104565 4906 server.go:997] "Starting client certificate rotation" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.104606 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.107241 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.172363 4906 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.179323 4906 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.179362 4906 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.209701 4906 log.go:25] "Validated CRI v1 runtime API" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.349593 4906 log.go:25] "Validated CRI v1 image API" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.352026 4906 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.363761 4906 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-02-27-08-23-56-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.363801 4906 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:41 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.382379 4906 manager.go:217] Machine: {Timestamp:2026-02-27 08:28:22.379201444 +0000 UTC m=+0.773603094 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654116352 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec 
SystemUUID:6a844eb5-aea5-4505-a424-d96dc4bc1329 BootID:b1730d04-932a-4b41-89e7-49b2fa07b78e Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:41 Capacity:3365408768 Type:vfs Inodes:821633 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108168 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827056128 Type:vfs Inodes:4108168 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827060224 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:57:27:4a Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:57:27:4a Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:50:bc:f8 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:70:04:e5 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:d0:43:c2 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:cd:8b:6b Speed:-1 Mtu:1496} {Name:eth10 MacAddress:d6:45:4b:20:4a:b2 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:4e:db:32:b1:fe:9d Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654116352 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} 
{Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.382639 4906 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.382755 4906 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.383118 4906 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.383307 4906 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.383339 4906 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" 
nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.383536 4906 topology_manager.go:138] "Creating topology manager with none policy" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.383547 4906 container_manager_linux.go:303] "Creating device plugin manager" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.383984 4906 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.384019 4906 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.385255 4906 state_mem.go:36] "Initialized new in-memory state store" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.385364 4906 server.go:1245] "Using root directory" path="/var/lib/kubelet" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.399770 4906 kubelet.go:418] "Attempting to sync node with API server" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.399799 4906 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.399847 4906 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.399866 4906 kubelet.go:324] "Adding apiserver pod source" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.399903 4906 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.406330 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.406603 4906 
reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.407530 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.407606 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.410492 4906 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.411996 4906 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.415829 4906 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418103 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418165 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418189 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418206 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418231 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418247 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418261 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418285 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418306 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418324 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418348 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.418365 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.420135 4906 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.421109 
4906 server.go:1280] "Started kubelet" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.421649 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:22 crc systemd[1]: Started Kubernetes Kubelet. Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.425443 4906 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.425443 4906 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.426620 4906 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.429533 4906 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.429591 4906 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.429941 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.430102 4906 volume_manager.go:287] "The desired_state_of_world populator starts" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.430118 4906 volume_manager.go:289] "Starting Kubelet Volume Manager" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.430220 4906 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.430522 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="200ms" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.431755 4906 factory.go:55] Registering systemd factory Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.431813 4906 factory.go:221] Registration of the systemd container factory successfully Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.432240 4906 factory.go:153] Registering CRI-O factory Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.432272 4906 factory.go:221] Registration of the crio container factory successfully Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.432407 4906 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.432446 4906 factory.go:103] Registering Raw factory Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.432475 4906 manager.go:1196] Started watching for new ooms in manager Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.431470 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.433371 4906 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.433837 4906 manager.go:319] Starting recovery of all containers Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.446337 4906 server.go:460] "Adding debug handlers to kubelet server" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.456112 4906 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.2:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18980d242e89f0e3 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,LastTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.467584 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.467776 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.467815 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.467842 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.467871 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.467933 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.467964 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" 
volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468011 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468052 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468080 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468107 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468134 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468162 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468196 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468223 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468263 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468295 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468346 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468374 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468399 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468426 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468453 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468482 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468531 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468559 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468595 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468633 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468686 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468717 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" 
volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468747 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468774 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468813 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468840 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468870 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468938 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468966 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.468992 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469020 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469044 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469135 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" 
volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469165 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469194 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469225 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469255 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469281 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469311 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469337 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469401 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469436 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469462 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469489 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" 
volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469518 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469557 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469587 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469617 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469735 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469768 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469832 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.469868 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.484223 4906 manager.go:324] Recovery completed Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.494869 4906 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495053 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" 
seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495085 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495110 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495138 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495159 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495186 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495207 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495248 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495271 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495292 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495313 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495333 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Feb 27 08:28:22 crc 
kubenswrapper[4906]: I0227 08:28:22.495353 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495376 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495399 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495420 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495441 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495461 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495484 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495504 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495524 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495544 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495564 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495582 4906 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495655 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495676 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495697 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495718 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495738 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495759 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495781 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495802 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495821 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495842 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495863 4906 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495912 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495934 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495955 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.495978 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496000 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496023 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496044 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496064 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496133 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496154 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496190 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" 
pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496213 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496237 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496261 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496285 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496308 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496332 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496354 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496387 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496411 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496435 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496458 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496479 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496494 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496497 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496646 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496668 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496730 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496752 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496771 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496793 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496812 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496847 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Feb 27 08:28:22 crc 
kubenswrapper[4906]: I0227 08:28:22.496869 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496912 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496934 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496953 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496971 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.496992 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497012 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497039 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497083 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497119 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497145 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 
08:28:22.497170 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497195 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497220 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497245 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497263 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497295 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497318 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497339 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497359 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497379 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497403 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497422 4906 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497445 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497464 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497483 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497505 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497525 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497546 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497566 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497588 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497609 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497630 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497653 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497674 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497693 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497713 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497743 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497763 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497783 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497803 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497825 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497846 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497864 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497915 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497935 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497957 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497977 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.497996 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498014 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498081 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498102 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498122 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498142 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498161 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498180 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498199 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498220 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498241 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498261 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498279 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498300 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498320 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498341 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498361 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498381 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498402 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" 
volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498421 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498441 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498462 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498480 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498499 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498518 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498537 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498557 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498576 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498594 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498614 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498635 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498656 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498674 4906 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498692 4906 reconstruct.go:97] "Volume reconstruction finished" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.498707 4906 reconciler.go:26] "Reconciler: start to sync state" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.505183 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.505230 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.505242 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.506016 4906 cpu_manager.go:225] "Starting CPU manager" policy="none" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.506062 4906 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.506088 4906 state_mem.go:36] "Initialized new in-memory state store" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.530334 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.547792 4906 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.550761 4906 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.550800 4906 status_manager.go:217] "Starting to sync pod status with apiserver" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.550837 4906 kubelet.go:2335] "Starting kubelet main sync loop" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.551021 4906 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Feb 27 08:28:22 crc kubenswrapper[4906]: W0227 08:28:22.551567 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.551618 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.631266 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.632004 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="400ms" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.652110 4906 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.731613 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.748949 4906 policy_none.go:49] "None policy: Start" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.750071 4906 memory_manager.go:170] "Starting memorymanager" policy="None" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.750165 4906 state_mem.go:35] "Initializing new in-memory state store" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.810857 4906 manager.go:334] "Starting Device Plugin manager" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.810951 4906 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.810967 4906 server.go:79] "Starting device plugin registration server" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.811453 4906 eviction_manager.go:189] "Eviction manager: starting control loop" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.811468 4906 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.811781 4906 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.811907 4906 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 
08:28:22.811923 4906 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.819907 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.852424 4906 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.852654 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.854589 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.854664 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.854678 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.854900 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.855443 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.855607 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.859651 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.859718 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.859773 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.860064 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.860191 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.860266 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.860594 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.861278 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.861385 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.862117 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.862193 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.862208 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.862417 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.862767 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.863094 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.863250 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.863279 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.863656 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.867103 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.867153 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.867184 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.867257 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.867290 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.867300 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.867850 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.868409 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.868456 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870105 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870161 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870171 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870314 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870330 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870339 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870475 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.870500 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.871348 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.871372 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.871382 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.911995 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.913352 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.913394 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.913408 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:22 crc kubenswrapper[4906]: I0227 08:28:22.913440 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:22 crc kubenswrapper[4906]: E0227 08:28:22.913859 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.2:6443: connect: connection refused" node="crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.006903 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.006955 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.006985 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007008 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007030 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007051 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007073 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007092 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007152 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007183 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007244 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007267 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007288 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007332 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.007355 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: E0227 08:28:23.032725 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="800ms" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.108895 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.108943 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.108965 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.108986 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109009 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109031 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109053 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109080 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109103 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109108 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109171 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109124 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109218 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109242 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109301 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109306 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109334 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109335 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109364 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109384 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109394 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109201 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109474 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109252 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109520 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109276 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109651 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109692 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109700 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.109719 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.114902 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.116430 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.116514 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.116535 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.116614 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:23 crc kubenswrapper[4906]: E0227 08:28:23.117252 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
38.102.83.2:6443: connect: connection refused" node="crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.205990 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.223357 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.232819 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.272059 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.279437 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.369225 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-d9207a1d1eacaa6044ece8195a690ed24cd9ca3c606c15b9348318a4bf5526e0 WatchSource:0}: Error finding container d9207a1d1eacaa6044ece8195a690ed24cd9ca3c606c15b9348318a4bf5526e0: Status 404 returned error can't find the container with id d9207a1d1eacaa6044ece8195a690ed24cd9ca3c606c15b9348318a4bf5526e0 Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.370403 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-807901ed2aa754cff1acefbfced8ffd47908276cfbec757ac0bf70e97af5c196 WatchSource:0}: Error finding container 807901ed2aa754cff1acefbfced8ffd47908276cfbec757ac0bf70e97af5c196: Status 404 returned error can't find the container with id 807901ed2aa754cff1acefbfced8ffd47908276cfbec757ac0bf70e97af5c196 Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.377439 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-fd272e67b919f6d8b1c9f417a9b93f5f6b49a4a4950ec2410a8d1dc5f9f635e8 WatchSource:0}: Error finding container fd272e67b919f6d8b1c9f417a9b93f5f6b49a4a4950ec2410a8d1dc5f9f635e8: Status 404 returned error can't find the container with id fd272e67b919f6d8b1c9f417a9b93f5f6b49a4a4950ec2410a8d1dc5f9f635e8 Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.387664 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-b71553f823f13bc4c601c080ab219e166fc6d31140c5e54d26e26ec501640429 WatchSource:0}: Error finding container b71553f823f13bc4c601c080ab219e166fc6d31140c5e54d26e26ec501640429: Status 404 returned error can't find the container with id b71553f823f13bc4c601c080ab219e166fc6d31140c5e54d26e26ec501640429 Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.388492 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-7a5310f2eb1f5f38542a3866847d5b13911851528a9da4e5e470af8b59ccbfac WatchSource:0}: Error finding container 
7a5310f2eb1f5f38542a3866847d5b13911851528a9da4e5e470af8b59ccbfac: Status 404 returned error can't find the container with id 7a5310f2eb1f5f38542a3866847d5b13911851528a9da4e5e470af8b59ccbfac Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.422492 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.422578 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:23 crc kubenswrapper[4906]: E0227 08:28:23.422645 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.518427 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.519819 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.519857 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.519949 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.519984 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:23 crc kubenswrapper[4906]: E0227 08:28:23.520472 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.2:6443: connect: connection refused" node="crc" Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.547712 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:23 crc kubenswrapper[4906]: E0227 08:28:23.547838 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.555535 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b71553f823f13bc4c601c080ab219e166fc6d31140c5e54d26e26ec501640429"} Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.557506 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fd272e67b919f6d8b1c9f417a9b93f5f6b49a4a4950ec2410a8d1dc5f9f635e8"} Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.559318 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"807901ed2aa754cff1acefbfced8ffd47908276cfbec757ac0bf70e97af5c196"} Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.560787 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"d9207a1d1eacaa6044ece8195a690ed24cd9ca3c606c15b9348318a4bf5526e0"} Feb 27 08:28:23 crc kubenswrapper[4906]: I0227 08:28:23.561724 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"7a5310f2eb1f5f38542a3866847d5b13911851528a9da4e5e470af8b59ccbfac"} Feb 27 08:28:23 crc kubenswrapper[4906]: E0227 08:28:23.833649 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="1.6s" Feb 27 08:28:23 crc kubenswrapper[4906]: W0227 08:28:23.915600 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:23 crc kubenswrapper[4906]: E0227 08:28:23.915691 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:24 crc kubenswrapper[4906]: W0227 08:28:24.049543 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:24 crc kubenswrapper[4906]: E0227 08:28:24.049665 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:24 crc kubenswrapper[4906]: I0227 08:28:24.276723 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 27 08:28:24 crc kubenswrapper[4906]: E0227 08:28:24.278589 4906 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:24 crc 
kubenswrapper[4906]: I0227 08:28:24.321249 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:24 crc kubenswrapper[4906]: I0227 08:28:24.323560 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:24 crc kubenswrapper[4906]: I0227 08:28:24.323635 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:24 crc kubenswrapper[4906]: I0227 08:28:24.323654 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:24 crc kubenswrapper[4906]: I0227 08:28:24.323698 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:24 crc kubenswrapper[4906]: E0227 08:28:24.324269 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.2:6443: connect: connection refused" node="crc" Feb 27 08:28:24 crc kubenswrapper[4906]: I0227 08:28:24.423023 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.423488 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:25 crc kubenswrapper[4906]: E0227 08:28:25.434634 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="3.2s" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.576191 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.576306 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.576328 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.576348 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.576256 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 
08:28:25.578213 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52" exitCode=0 Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.578288 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.578380 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.578449 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.578474 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.578488 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.579654 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.579810 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.579944 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.580286 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.580333 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f6dfcfacc19fca2a2d3163232cf910127d5fc217f2172b435d643985340464f2"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.580291 4906 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f6dfcfacc19fca2a2d3163232cf910127d5fc217f2172b435d643985340464f2" exitCode=0 Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.581382 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.581498 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.581602 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.582384 4906 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="f23b21d14cdf03fbee2edf22368e005836dcee32f0eced46ee2e41e977a96335" exitCode=0 Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.582540 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.582577 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:25 crc 
kubenswrapper[4906]: I0227 08:28:25.582738 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"f23b21d14cdf03fbee2edf22368e005836dcee32f0eced46ee2e41e977a96335"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.584336 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.584368 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.584379 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.584421 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.584457 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.584479 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.586128 4906 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389" exitCode=0 Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.586174 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389"} Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.586204 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.589517 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.589627 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.589652 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.924680 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.926587 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.926643 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.926665 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:25 crc kubenswrapper[4906]: I0227 08:28:25.926697 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:25 crc kubenswrapper[4906]: E0227 08:28:25.927449 4906 kubelet_node_status.go:99] "Unable to register node with API server" 
err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.2:6443: connect: connection refused" node="crc" Feb 27 08:28:26 crc kubenswrapper[4906]: W0227 08:28:26.288248 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:26 crc kubenswrapper[4906]: E0227 08:28:26.288343 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.423485 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:26 crc kubenswrapper[4906]: W0227 08:28:26.459657 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:26 crc kubenswrapper[4906]: E0227 08:28:26.459778 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:26 crc kubenswrapper[4906]: W0227 08:28:26.583128 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:26 crc kubenswrapper[4906]: E0227 08:28:26.583214 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.592500 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"f8ac0b72f2c937a25ee718957cbdccae98f76d3f6684e69bb1b89062040fdd63"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.592608 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.593612 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.593637 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.593646 4906 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.597475 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.597502 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.597512 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.597573 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.598191 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.598216 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.598227 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.601608 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.601636 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.601648 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.601659 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.604313 4906 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="be9e110fce50dcce9faa851cc41868baceb592e8a0e8cdc2363bba73308d8c6f" exitCode=0 Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.604399 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.604830 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.605144 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"be9e110fce50dcce9faa851cc41868baceb592e8a0e8cdc2363bba73308d8c6f"} Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.605559 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.605579 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.605588 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.606107 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.606125 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.606133 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:26 crc kubenswrapper[4906]: W0227 08:28:26.801334 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.2:6443: connect: connection refused Feb 27 08:28:26 crc kubenswrapper[4906]: E0227 08:28:26.801449 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.2:6443: connect: connection refused" logger="UnhandledError" Feb 27 08:28:26 crc kubenswrapper[4906]: I0227 08:28:26.835836 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.005701 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.615389 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"36e53a8026ce8c04497c8f2ece03ddc49b80685e15339c9e83f78a770bf45acb"} Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.615447 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.616460 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.616513 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.616535 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.620166 4906 generic.go:334] 
"Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b77f4f0e00b2d7289c5e83f9ae56767316b65f0ae2e774d7c709cf423ba9b437" exitCode=0 Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.620301 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.620346 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.620488 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.620632 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.620513 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b77f4f0e00b2d7289c5e83f9ae56767316b65f0ae2e774d7c709cf423ba9b437"} Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622262 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622383 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622285 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622504 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622526 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622466 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622595 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622358 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622625 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622647 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622700 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:27 crc kubenswrapper[4906]: I0227 08:28:27.622708 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.486228 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.630510 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"737a44f829a3425d302c5cae15a8f6122fb3d0af83df591f77f2621228ff1421"} Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.631299 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"29eff573776c7b8b5e81766ab5f94d3e624e79bb46c99974bca712ae636f6cb5"} Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.630709 4906 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.630604 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.631486 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.633287 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.633355 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.633379 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.635670 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.635752 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.635773 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:28 crc kubenswrapper[4906]: I0227 08:28:28.672663 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.127915 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.130777 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.130832 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.130848 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.130914 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.286441 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.406241 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.406583 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 
08:28:29.408332 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.408378 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.408395 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.417134 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.639298 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f4171fc6975c6694f0a11072977703b9835265735638ff89800dbe7b6c0b8b45"} Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.639372 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"e9e42a7d168646c14021320a60fecff660555424fa0301e8b94ec2a33a2fa264"} Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.639395 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f337ed2892c2b7d76961dd9ffddf9c734b56bd4c33b5092d4130aaf4dac438a2"} Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.639419 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.639602 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.639682 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641479 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641545 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641566 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641686 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641731 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641750 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641949 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641975 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:29 crc kubenswrapper[4906]: I0227 08:28:29.641990 4906 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.328909 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.642429 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.642514 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.642545 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644743 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644791 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644809 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644820 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644759 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644956 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644851 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644991 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:30 crc kubenswrapper[4906]: I0227 08:28:30.644990 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.702569 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.702731 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.703793 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.703827 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.703839 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.828458 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.828710 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:31 crc kubenswrapper[4906]: 
I0227 08:28:31.830187 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.830238 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.830254 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.941041 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.941260 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.942491 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.942532 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:31 crc kubenswrapper[4906]: I0227 08:28:31.942545 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:32 crc kubenswrapper[4906]: I0227 08:28:32.302680 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Feb 27 08:28:32 crc kubenswrapper[4906]: I0227 08:28:32.646702 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:32 crc kubenswrapper[4906]: I0227 08:28:32.647588 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:32 crc kubenswrapper[4906]: I0227 08:28:32.647624 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:32 crc kubenswrapper[4906]: I0227 08:28:32.647635 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:32 crc kubenswrapper[4906]: E0227 08:28:32.820289 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:28:33 crc kubenswrapper[4906]: I0227 08:28:33.329071 4906 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 27 08:28:33 crc kubenswrapper[4906]: I0227 08:28:33.329153 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 27 08:28:37 crc kubenswrapper[4906]: I0227 08:28:37.011721 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:37 crc kubenswrapper[4906]: I0227 08:28:37.011995 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Feb 27 08:28:37 crc kubenswrapper[4906]: I0227 08:28:37.013345 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:37 crc kubenswrapper[4906]: I0227 08:28:37.013403 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:37 crc kubenswrapper[4906]: I0227 08:28:37.013415 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:37 crc kubenswrapper[4906]: I0227 08:28:37.422969 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.383995 4906 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:38 crc kubenswrapper[4906]: W0227 08:28:38.385523 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.385610 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:38 crc kubenswrapper[4906]: W0227 08:28:38.389603 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.389706 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.395854 4906 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" 
start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.395947 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.396377 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" interval="6.4s" Feb 27 08:28:38 crc kubenswrapper[4906]: W0227 08:28:38.396916 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.397018 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.401854 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" node="crc" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.406162 4906 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.406235 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.411811 4906 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" event="&Event{ObjectMeta:{crc.18980d242e89f0e3 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,LastTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:38 crc kubenswrapper[4906]: W0227 08:28:38.413733 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z Feb 27 08:28:38 crc kubenswrapper[4906]: E0227 08:28:38.413869 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.443763 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:38Z is after 2026-02-23T05:33:13Z Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.666716 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.668665 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="36e53a8026ce8c04497c8f2ece03ddc49b80685e15339c9e83f78a770bf45acb" exitCode=255 Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.668702 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"36e53a8026ce8c04497c8f2ece03ddc49b80685e15339c9e83f78a770bf45acb"} Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.668857 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.671934 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.671961 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.671969 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:38 crc kubenswrapper[4906]: I0227 08:28:38.673356 4906 scope.go:117] "RemoveContainer" containerID="36e53a8026ce8c04497c8f2ece03ddc49b80685e15339c9e83f78a770bf45acb" Feb 27 08:28:39 crc kubenswrapper[4906]: I0227 08:28:39.425913 4906 csi_plugin.go:884] Failed to 
contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:39Z is after 2026-02-23T05:33:13Z Feb 27 08:28:39 crc kubenswrapper[4906]: I0227 08:28:39.674007 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 27 08:28:39 crc kubenswrapper[4906]: I0227 08:28:39.675911 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e"} Feb 27 08:28:39 crc kubenswrapper[4906]: I0227 08:28:39.676109 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:39 crc kubenswrapper[4906]: I0227 08:28:39.677045 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:39 crc kubenswrapper[4906]: I0227 08:28:39.677092 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:39 crc kubenswrapper[4906]: I0227 08:28:39.677103 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.426210 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:40Z is after 2026-02-23T05:33:13Z Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.681843 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.682754 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.685934 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" exitCode=255 Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.686012 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e"} Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.686076 4906 scope.go:117] "RemoveContainer" containerID="36e53a8026ce8c04497c8f2ece03ddc49b80685e15339c9e83f78a770bf45acb" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.686399 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.688245 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 
08:28:40.688308 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.688347 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:40 crc kubenswrapper[4906]: I0227 08:28:40.689364 4906 scope.go:117] "RemoveContainer" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" Feb 27 08:28:40 crc kubenswrapper[4906]: E0227 08:28:40.690338 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.425785 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:41Z is after 2026-02-23T05:33:13Z Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.690979 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.735390 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.735640 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.737249 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.737324 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.737342 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.749053 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.835259 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.835526 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.837818 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.837957 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.837985 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.838920 4906 scope.go:117] 
"RemoveContainer" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" Feb 27 08:28:41 crc kubenswrapper[4906]: E0227 08:28:41.839199 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:28:41 crc kubenswrapper[4906]: I0227 08:28:41.842868 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.427838 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:42Z is after 2026-02-23T05:33:13Z Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.698060 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.698983 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.699999 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.700061 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.700086 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.700361 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.700405 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.700423 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:42 crc kubenswrapper[4906]: I0227 08:28:42.701387 4906 scope.go:117] "RemoveContainer" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" Feb 27 08:28:42 crc kubenswrapper[4906]: E0227 08:28:42.701728 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:28:42 crc kubenswrapper[4906]: E0227 08:28:42.820759 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.331060 4906 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller 
namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.331191 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.416504 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.426165 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:43Z is after 2026-02-23T05:33:13Z Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.701024 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.702245 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.702387 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.702401 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:43 crc kubenswrapper[4906]: I0227 08:28:43.703249 4906 scope.go:117] "RemoveContainer" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" Feb 27 08:28:43 crc kubenswrapper[4906]: E0227 08:28:43.703506 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:28:44 crc kubenswrapper[4906]: I0227 08:28:44.426582 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:44Z is after 2026-02-23T05:33:13Z Feb 27 08:28:44 crc kubenswrapper[4906]: I0227 08:28:44.802374 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:44 crc kubenswrapper[4906]: E0227 08:28:44.803507 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:44Z is 
after 2026-02-23T05:33:13Z" interval="7s" Feb 27 08:28:44 crc kubenswrapper[4906]: I0227 08:28:44.803551 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:44 crc kubenswrapper[4906]: I0227 08:28:44.803690 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:44 crc kubenswrapper[4906]: I0227 08:28:44.803706 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:44 crc kubenswrapper[4906]: I0227 08:28:44.803744 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:44 crc kubenswrapper[4906]: E0227 08:28:44.810268 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:44Z is after 2026-02-23T05:33:13Z" node="crc" Feb 27 08:28:45 crc kubenswrapper[4906]: I0227 08:28:45.428270 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:45Z is after 2026-02-23T05:33:13Z Feb 27 08:28:45 crc kubenswrapper[4906]: W0227 08:28:45.856954 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:45Z is after 2026-02-23T05:33:13Z Feb 27 08:28:45 crc kubenswrapper[4906]: E0227 08:28:45.857821 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:45Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:46 crc kubenswrapper[4906]: I0227 08:28:46.427139 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:46Z is after 2026-02-23T05:33:13Z Feb 27 08:28:46 crc kubenswrapper[4906]: W0227 08:28:46.610026 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:46Z is after 2026-02-23T05:33:13Z Feb 27 08:28:46 crc kubenswrapper[4906]: E0227 08:28:46.610165 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:46Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:46 crc kubenswrapper[4906]: I0227 08:28:46.613755 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 27 08:28:46 crc kubenswrapper[4906]: E0227 08:28:46.619586 4906 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:46Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:47 crc kubenswrapper[4906]: I0227 08:28:47.426421 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:47Z is after 2026-02-23T05:33:13Z Feb 27 08:28:48 crc kubenswrapper[4906]: E0227 08:28:48.418100 4906 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:48Z is after 2026-02-23T05:33:13Z" event="&Event{ObjectMeta:{crc.18980d242e89f0e3 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,LastTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:48 crc kubenswrapper[4906]: I0227 08:28:48.427944 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:48Z is after 2026-02-23T05:33:13Z Feb 27 08:28:48 crc kubenswrapper[4906]: I0227 08:28:48.673476 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:28:48 crc kubenswrapper[4906]: I0227 08:28:48.673803 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:48 crc kubenswrapper[4906]: I0227 08:28:48.675683 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:48 crc kubenswrapper[4906]: I0227 08:28:48.675747 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:48 crc kubenswrapper[4906]: I0227 08:28:48.675770 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:48 crc kubenswrapper[4906]: I0227 08:28:48.676706 4906 
scope.go:117] "RemoveContainer" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" Feb 27 08:28:48 crc kubenswrapper[4906]: E0227 08:28:48.677152 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:28:48 crc kubenswrapper[4906]: W0227 08:28:48.993489 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:48Z is after 2026-02-23T05:33:13Z Feb 27 08:28:48 crc kubenswrapper[4906]: E0227 08:28:48.993598 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:48Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:49 crc kubenswrapper[4906]: I0227 08:28:49.426948 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:49Z is after 2026-02-23T05:33:13Z Feb 27 08:28:50 crc kubenswrapper[4906]: I0227 08:28:50.425396 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:50Z is after 2026-02-23T05:33:13Z Feb 27 08:28:51 crc kubenswrapper[4906]: W0227 08:28:51.136237 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:51Z is after 2026-02-23T05:33:13Z Feb 27 08:28:51 crc kubenswrapper[4906]: E0227 08:28:51.136378 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:51Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Feb 27 08:28:51 crc kubenswrapper[4906]: I0227 08:28:51.429035 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:51Z 
is after 2026-02-23T05:33:13Z Feb 27 08:28:51 crc kubenswrapper[4906]: E0227 08:28:51.808621 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:51Z is after 2026-02-23T05:33:13Z" interval="7s" Feb 27 08:28:51 crc kubenswrapper[4906]: I0227 08:28:51.810612 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:51 crc kubenswrapper[4906]: I0227 08:28:51.812138 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:51 crc kubenswrapper[4906]: I0227 08:28:51.812187 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:51 crc kubenswrapper[4906]: I0227 08:28:51.812201 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:51 crc kubenswrapper[4906]: I0227 08:28:51.812229 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:51 crc kubenswrapper[4906]: E0227 08:28:51.815851 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:51Z is after 2026-02-23T05:33:13Z" node="crc" Feb 27 08:28:52 crc kubenswrapper[4906]: I0227 08:28:52.428421 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z Feb 27 08:28:52 crc kubenswrapper[4906]: E0227 08:28:52.821223 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.329299 4906 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.329450 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.329575 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.329864 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.331834 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.331928 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.331940 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.332725 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cluster-policy-controller" containerStatusID={"Type":"cri-o","ID":"1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container cluster-policy-controller failed startup probe, will be restarted" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.333002 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" containerID="cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3" gracePeriod=30 Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.429140 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:53Z is after 2026-02-23T05:33:13Z Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.735732 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.736325 4906 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3" exitCode=255 Feb 27 08:28:53 crc kubenswrapper[4906]: I0227 08:28:53.736410 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3"} Feb 27 08:28:54 crc kubenswrapper[4906]: I0227 08:28:54.429235 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:28:54 crc kubenswrapper[4906]: I0227 08:28:54.744916 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Feb 27 08:28:54 crc kubenswrapper[4906]: I0227 08:28:54.745563 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b"} Feb 27 08:28:54 crc kubenswrapper[4906]: I0227 08:28:54.745841 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Feb 27 08:28:54 crc kubenswrapper[4906]: I0227 08:28:54.747736 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:54 crc kubenswrapper[4906]: I0227 08:28:54.748057 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:54 crc kubenswrapper[4906]: I0227 08:28:54.748088 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:55 crc kubenswrapper[4906]: I0227 08:28:55.426459 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:28:55 crc kubenswrapper[4906]: I0227 08:28:55.750029 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:55 crc kubenswrapper[4906]: I0227 08:28:55.752282 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:55 crc kubenswrapper[4906]: I0227 08:28:55.752369 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:55 crc kubenswrapper[4906]: I0227 08:28:55.752391 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:56 crc kubenswrapper[4906]: I0227 08:28:56.430282 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:28:57 crc kubenswrapper[4906]: I0227 08:28:57.430241 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.426927 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d242e89f0e3 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,LastTimestamp:2026-02-27 08:28:22.421065955 +0000 UTC m=+0.815467605,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: I0227 08:28:58.427619 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.430970 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" 
event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.434875 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.439808 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e7ecf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,LastTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.446282 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d2446085c55 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeAllocatableEnforced,Message:Updated Node Allocatable limit across pods,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.815226965 +0000 UTC m=+1.209628605,LastTimestamp:2026-02-27 08:28:22.815226965 +0000 UTC m=+1.209628605,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.451126 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338df396\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group 
\"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.854649051 +0000 UTC m=+1.249050671,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.454747 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e4d50\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.854673442 +0000 UTC m=+1.249075062,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.459293 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e7ecf\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e7ecf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,LastTimestamp:2026-02-27 08:28:22.854685952 +0000 UTC m=+1.249087572,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.464034 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338df396\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.859687162 +0000 UTC m=+1.254088792,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.468593 4906 event.go:359] "Server rejected event (will not retry!)" 
err="events \"crc.18980d24338e4d50\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.859730992 +0000 UTC m=+1.254132612,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.472288 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e7ecf\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e7ecf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,LastTimestamp:2026-02-27 08:28:22.859784573 +0000 UTC m=+1.254186203,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.476538 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338df396\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.860167497 +0000 UTC m=+1.254569127,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.481271 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e4d50\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.860259078 +0000 UTC m=+1.254660688,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 
crc kubenswrapper[4906]: E0227 08:28:58.485131 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e7ecf\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e7ecf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,LastTimestamp:2026-02-27 08:28:22.860340519 +0000 UTC m=+1.254742139,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.489537 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338df396\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.862186227 +0000 UTC m=+1.256587847,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.495611 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e4d50\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.862203627 +0000 UTC m=+1.256605257,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.501764 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e7ecf\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e7ecf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,LastTimestamp:2026-02-27 08:28:22.862222498 +0000 UTC m=+1.256624128,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 
+0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.506378 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338df396\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.863273308 +0000 UTC m=+1.257674908,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.510872 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e4d50\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.863288948 +0000 UTC m=+1.257690558,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.514991 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e7ecf\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e7ecf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,LastTimestamp:2026-02-27 08:28:22.863680582 +0000 UTC m=+1.258082192,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.518940 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338df396\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC 
m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.867136917 +0000 UTC m=+1.261538537,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.524094 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e4d50\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.867167897 +0000 UTC m=+1.261569517,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.529143 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e7ecf\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e7ecf default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.505250511 +0000 UTC m=+0.899652131,LastTimestamp:2026-02-27 08:28:22.867195007 +0000 UTC m=+1.261596637,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.534391 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338df396\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338df396 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50521487 +0000 UTC m=+0.899616490,LastTimestamp:2026-02-27 08:28:22.867283288 +0000 UTC m=+1.261684898,Count:8,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.539220 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.18980d24338e4d50\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.18980d24338e4d50 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: 
NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:22.50523784 +0000 UTC m=+0.899639460,LastTimestamp:2026-02-27 08:28:22.867296418 +0000 UTC m=+1.261698028,Count:8,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.545262 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d246775463c openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:23.37601286 +0000 UTC m=+1.770414470,LastTimestamp:2026-02-27 08:28:23.37601286 +0000 UTC m=+1.770414470,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.549381 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.18980d2467768967 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:23.376095591 +0000 UTC m=+1.770497231,LastTimestamp:2026-02-27 08:28:23.376095591 +0000 UTC m=+1.770497231,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.553643 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2467fec25f openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:23.385023071 +0000 UTC 
m=+1.779424681,LastTimestamp:2026-02-27 08:28:23.385023071 +0000 UTC m=+1.779424681,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.558694 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d246e10ae22 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:23.486860834 +0000 UTC m=+1.881262484,LastTimestamp:2026-02-27 08:28:23.486860834 +0000 UTC m=+1.881262484,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.563061 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d246e429cff openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:23.490133247 +0000 UTC m=+1.884534867,LastTimestamp:2026-02-27 08:28:23.490133247 +0000 UTC m=+1.884534867,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.567690 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d24b458abe4 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.665983972 +0000 UTC m=+3.060385582,LastTimestamp:2026-02-27 08:28:24.665983972 +0000 UTC m=+3.060385582,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" 
Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.571659 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d24b47c8689 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.668333705 +0000 UTC m=+3.062735325,LastTimestamp:2026-02-27 08:28:24.668333705 +0000 UTC m=+3.062735325,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.575619 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.18980d24b4b6de8c openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.672157324 +0000 UTC m=+3.066558964,LastTimestamp:2026-02-27 08:28:24.672157324 +0000 UTC m=+3.066558964,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.579841 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d24b4b9ce5a openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Created,Message:Created container wait-for-host-port,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.672349786 +0000 UTC m=+3.066751406,LastTimestamp:2026-02-27 08:28:24.672349786 +0000 UTC m=+3.066751406,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.584660 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24b4ba285a openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Created,Message:Created container kube-controller-manager,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.672372826 +0000 UTC m=+3.066774466,LastTimestamp:2026-02-27 08:28:24.672372826 +0000 UTC m=+3.066774466,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.590585 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.18980d24b57cd308 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.685130504 +0000 UTC m=+3.079532134,LastTimestamp:2026-02-27 08:28:24.685130504 +0000 UTC m=+3.079532134,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.595573 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d24b57da4cd openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.685184205 +0000 UTC m=+3.079585835,LastTimestamp:2026-02-27 08:28:24.685184205 +0000 UTC m=+3.079585835,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.600085 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d24b5a967a6 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.688052134 +0000 UTC m=+3.082453774,LastTimestamp:2026-02-27 08:28:24.688052134 +0000 UTC m=+3.082453774,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 
UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.606837 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d24b5a9faae openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Started,Message:Started container wait-for-host-port,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.688089774 +0000 UTC m=+3.082491404,LastTimestamp:2026-02-27 08:28:24.688089774 +0000 UTC m=+3.082491404,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.612296 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24b5ae97a1 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Started,Message:Started container kube-controller-manager,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.688392097 +0000 UTC m=+3.082793737,LastTimestamp:2026-02-27 08:28:24.688392097 +0000 UTC m=+3.082793737,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.616980 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24b5c7c9bc openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.690043324 +0000 UTC m=+3.084444934,LastTimestamp:2026-02-27 08:28:24.690043324 +0000 UTC m=+3.084444934,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.621661 4906 event.go:359] "Server rejected event (will not retry!)" err="events is 
forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24c885f447 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Created,Message:Created container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.004495943 +0000 UTC m=+3.398897583,LastTimestamp:2026-02-27 08:28:25.004495943 +0000 UTC m=+3.398897583,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.626468 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24c97632c5 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Started,Message:Started container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.020240581 +0000 UTC m=+3.414642191,LastTimestamp:2026-02-27 08:28:25.020240581 +0000 UTC m=+3.414642191,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.630010 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24c98d7e77 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.021767287 +0000 UTC m=+3.416168927,LastTimestamp:2026-02-27 08:28:25.021767287 +0000 UTC m=+3.416168927,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.634032 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" 
event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24d44876a4 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Created,Message:Created container kube-controller-manager-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.201792676 +0000 UTC m=+3.596194326,LastTimestamp:2026-02-27 08:28:25.201792676 +0000 UTC m=+3.596194326,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.638083 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24d54f14d9 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Started,Message:Started container kube-controller-manager-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.219003609 +0000 UTC m=+3.613405259,LastTimestamp:2026-02-27 08:28:25.219003609 +0000 UTC m=+3.613405259,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.642228 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24d5689383 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.220674435 +0000 UTC m=+3.615076085,LastTimestamp:2026-02-27 08:28:25.220674435 +0000 UTC m=+3.615076085,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.646014 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24e307b004 openshift-kube-controller-manager 0 
0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Created,Message:Created container kube-controller-manager-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.449205764 +0000 UTC m=+3.843607374,LastTimestamp:2026-02-27 08:28:25.449205764 +0000 UTC m=+3.843607374,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.652950 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24e3e4519a openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Started,Message:Started container kube-controller-manager-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.46366505 +0000 UTC m=+3.858066700,LastTimestamp:2026-02-27 08:28:25.46366505 +0000 UTC m=+3.858066700,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.657930 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d24eaf6780e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.582295054 +0000 UTC m=+3.976696704,LastTimestamp:2026-02-27 08:28:25.582295054 +0000 UTC m=+3.976696704,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.662063 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d24eb087e6d openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.583476333 +0000 UTC m=+3.977877943,LastTimestamp:2026-02-27 08:28:25.583476333 +0000 UTC m=+3.977877943,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.666587 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.18980d24eb508a1f openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.588197919 +0000 UTC m=+3.982599569,LastTimestamp:2026-02-27 08:28:25.588197919 +0000 UTC m=+3.982599569,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.672185 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d24eb969e73 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.592790643 +0000 UTC m=+3.987192293,LastTimestamp:2026-02-27 08:28:25.592790643 +0000 UTC m=+3.987192293,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.676434 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d24f96766aa openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Created,Message:Created container etcd-ensure-env-vars,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.824577194 +0000 UTC m=+4.218978794,LastTimestamp:2026-02-27 08:28:25.824577194 +0000 UTC m=+4.218978794,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.680172 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d24f98bdad9 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Created,Message:Created container kube-apiserver,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.826966233 +0000 UTC m=+4.221367843,LastTimestamp:2026-02-27 08:28:25.826966233 +0000 UTC m=+4.221367843,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.685205 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d24f9ad8273 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Created,Message:Created container kube-scheduler,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.829171827 +0000 UTC m=+4.223573447,LastTimestamp:2026-02-27 08:28:25.829171827 +0000 UTC m=+4.223573447,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.691226 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.18980d24fa479830 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Created,Message:Created container kube-rbac-proxy-crio,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.839269936 +0000 UTC m=+4.233671546,LastTimestamp:2026-02-27 08:28:25.839269936 +0000 UTC 
m=+4.233671546,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.697774 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d24faf6cd49 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Started,Message:Started container kube-apiserver,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.850752329 +0000 UTC m=+4.245153949,LastTimestamp:2026-02-27 08:28:25.850752329 +0000 UTC m=+4.245153949,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.705207 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d24fb079996 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.851853206 +0000 UTC m=+4.246254816,LastTimestamp:2026-02-27 08:28:25.851853206 +0000 UTC m=+4.246254816,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.710921 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d24fb6af915 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Started,Message:Started container etcd-ensure-env-vars,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.858365717 +0000 UTC m=+4.252767327,LastTimestamp:2026-02-27 08:28:25.858365717 +0000 UTC m=+4.252767327,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.716248 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" 
in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.18980d24fb6b03f1 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Started,Message:Started container kube-rbac-proxy-crio,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.858368497 +0000 UTC m=+4.252770107,LastTimestamp:2026-02-27 08:28:25.858368497 +0000 UTC m=+4.252770107,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.721502 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d24fb6ecf23 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Started,Message:Started container kube-scheduler,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.858617123 +0000 UTC m=+4.253018733,LastTimestamp:2026-02-27 08:28:25.858617123 +0000 UTC m=+4.253018733,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.730175 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d24fb9680e9 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.861218537 +0000 UTC m=+4.255620147,LastTimestamp:2026-02-27 08:28:25.861218537 +0000 UTC m=+4.255620147,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.736094 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d2507adb946 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Created,Message:Created container kube-scheduler-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.064066886 +0000 UTC m=+4.458468496,LastTimestamp:2026-02-27 08:28:26.064066886 +0000 UTC m=+4.458468496,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.742360 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2507ddad3e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Created,Message:Created container kube-apiserver-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.067209534 +0000 UTC m=+4.461611154,LastTimestamp:2026-02-27 08:28:26.067209534 +0000 UTC m=+4.461611154,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.749929 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d25084c1d3a openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Started,Message:Started container kube-scheduler-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.074447162 +0000 UTC m=+4.468848772,LastTimestamp:2026-02-27 08:28:26.074447162 +0000 UTC m=+4.468848772,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.754317 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d250863647a openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Pulled,Message:Container image 
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.07597273 +0000 UTC m=+4.470374340,LastTimestamp:2026-02-27 08:28:26.07597273 +0000 UTC m=+4.470374340,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.759771 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2508f6a404 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Started,Message:Started container kube-apiserver-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.085622788 +0000 UTC m=+4.480024398,LastTimestamp:2026-02-27 08:28:26.085622788 +0000 UTC m=+4.480024398,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.765835 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d250907d573 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.086749555 +0000 UTC m=+4.481151165,LastTimestamp:2026-02-27 08:28:26.086749555 +0000 UTC m=+4.481151165,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.771423 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d25170a2d86 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Created,Message:Created container kube-scheduler-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.321784198 +0000 UTC 
m=+4.716185808,LastTimestamp:2026-02-27 08:28:26.321784198 +0000 UTC m=+4.716185808,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.775377 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d25171dc15e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Created,Message:Created container kube-apiserver-cert-regeneration-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.32306723 +0000 UTC m=+4.717468840,LastTimestamp:2026-02-27 08:28:26.32306723 +0000 UTC m=+4.717468840,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.781257 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.18980d251863bd3a openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Started,Message:Started container kube-scheduler-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.344430906 +0000 UTC m=+4.738832516,LastTimestamp:2026-02-27 08:28:26.344430906 +0000 UTC m=+4.738832516,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.787479 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2518bf2b28 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Started,Message:Started container kube-apiserver-cert-regeneration-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.350422824 +0000 UTC m=+4.744824434,LastTimestamp:2026-02-27 08:28:26.350422824 +0000 UTC m=+4.744824434,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.792568 4906 event.go:359] "Server rejected event (will 
not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2518d4f2d7 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.351850199 +0000 UTC m=+4.746251809,LastTimestamp:2026-02-27 08:28:26.351850199 +0000 UTC m=+4.746251809,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.796681 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2525146711 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Created,Message:Created container kube-apiserver-insecure-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.557335313 +0000 UTC m=+4.951736923,LastTimestamp:2026-02-27 08:28:26.557335313 +0000 UTC m=+4.951736923,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.800721 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2525d746dc openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Started,Message:Started container kube-apiserver-insecure-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.570106588 +0000 UTC m=+4.964508198,LastTimestamp:2026-02-27 08:28:26.570106588 +0000 UTC m=+4.964508198,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.805229 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2525ed5a5e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC 
map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.571553374 +0000 UTC m=+4.965954984,LastTimestamp:2026-02-27 08:28:26.571553374 +0000 UTC m=+4.965954984,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.814090 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25286ce59f openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.613466527 +0000 UTC m=+5.007868137,LastTimestamp:2026-02-27 08:28:26.613466527 +0000 UTC m=+5.007868137,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.814797 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Feb 27 08:28:58 crc kubenswrapper[4906]: I0227 08:28:58.816084 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:28:58 crc kubenswrapper[4906]: I0227 08:28:58.818182 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:28:58 crc kubenswrapper[4906]: I0227 08:28:58.818235 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:28:58 crc kubenswrapper[4906]: I0227 08:28:58.818247 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:28:58 crc kubenswrapper[4906]: I0227 08:28:58.818284 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.818724 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d253460f091 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Created,Message:Created container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.814009489 +0000 UTC m=+5.208411109,LastTimestamp:2026-02-27 08:28:26.814009489 +0000 UTC m=+5.208411109,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.822283 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.822646 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25348abc71 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Created,Message:Created container etcd-resources-copy,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.816748657 +0000 UTC m=+5.211150287,LastTimestamp:2026-02-27 08:28:26.816748657 +0000 UTC m=+5.211150287,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.826300 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2537ed7a92 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Started,Message:Started container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.873551506 +0000 UTC m=+5.267953126,LastTimestamp:2026-02-27 08:28:26.873551506 +0000 UTC m=+5.267953126,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.831870 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d253a887fac openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Started,Message:Started 
container etcd-resources-copy,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.917265324 +0000 UTC m=+5.311666974,LastTimestamp:2026-02-27 08:28:26.917265324 +0000 UTC m=+5.311666974,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.838673 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d2564bda1e6 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:27.625390566 +0000 UTC m=+6.019792176,LastTimestamp:2026-02-27 08:28:27.625390566 +0000 UTC m=+6.019792176,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.843368 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25752d0e4f openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Created,Message:Created container etcdctl,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:27.901128271 +0000 UTC m=+6.295529911,LastTimestamp:2026-02-27 08:28:27.901128271 +0000 UTC m=+6.295529911,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.847956 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d2579d8325e openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Started,Message:Started container etcdctl,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:27.979453022 +0000 UTC m=+6.373854642,LastTimestamp:2026-02-27 08:28:27.979453022 +0000 UTC m=+6.373854642,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.854414 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource 
\"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d2579e9fd17 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:27.980619031 +0000 UTC m=+6.375020651,LastTimestamp:2026-02-27 08:28:27.980619031 +0000 UTC m=+6.375020651,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.859244 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d259181403d openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Created,Message:Created container etcd,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:28.376408125 +0000 UTC m=+6.770809775,LastTimestamp:2026-02-27 08:28:28.376408125 +0000 UTC m=+6.770809775,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.865024 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25941a5239 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Started,Message:Started container etcd,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:28.419994169 +0000 UTC m=+6.814395819,LastTimestamp:2026-02-27 08:28:28.419994169 +0000 UTC m=+6.814395819,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.871148 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d259434f8e4 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on 
machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:28.421740772 +0000 UTC m=+6.816142382,LastTimestamp:2026-02-27 08:28:28.421740772 +0000 UTC m=+6.816142382,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.878737 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25ad31ec0a openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Created,Message:Created container etcd-metrics,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:28.840971274 +0000 UTC m=+7.235372884,LastTimestamp:2026-02-27 08:28:28.840971274 +0000 UTC m=+7.235372884,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.884067 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25b2e0816f openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Started,Message:Started container etcd-metrics,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:28.936298863 +0000 UTC m=+7.330700513,LastTimestamp:2026-02-27 08:28:28.936298863 +0000 UTC m=+7.330700513,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.891229 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25b300e638 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:28.938421816 +0000 UTC m=+7.332823456,LastTimestamp:2026-02-27 08:28:28.938421816 +0000 UTC m=+7.332823456,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.896317 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource 
\"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25c2eee802 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Created,Message:Created container etcd-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:29.205678082 +0000 UTC m=+7.600079702,LastTimestamp:2026-02-27 08:28:29.205678082 +0000 UTC m=+7.600079702,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.901582 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25c3c97ae8 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Started,Message:Started container etcd-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:29.220002536 +0000 UTC m=+7.614404146,LastTimestamp:2026-02-27 08:28:29.220002536 +0000 UTC m=+7.614404146,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.906610 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25c3deecec openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:29.22140798 +0000 UTC m=+7.615809590,LastTimestamp:2026-02-27 08:28:29.22140798 +0000 UTC m=+7.615809590,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.911658 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25cf8fb2c7 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Created,Message:Created container etcd-rev,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:29.417542343 +0000 UTC 
m=+7.811943963,LastTimestamp:2026-02-27 08:28:29.417542343 +0000 UTC m=+7.811943963,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.919062 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.18980d25d087bfae openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Started,Message:Started container etcd-rev,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:29.433798574 +0000 UTC m=+7.828200184,LastTimestamp:2026-02-27 08:28:29.433798574 +0000 UTC m=+7.828200184,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.928505 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Feb 27 08:28:58 crc kubenswrapper[4906]: &Event{ObjectMeta:{kube-controller-manager-crc.18980d26b8b5c384 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": context deadline exceeded (Client.Timeout exceeded while awaiting headers) Feb 27 08:28:58 crc kubenswrapper[4906]: body: Feb 27 08:28:58 crc kubenswrapper[4906]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:33.329128324 +0000 UTC m=+11.723529934,LastTimestamp:2026-02-27 08:28:33.329128324 +0000 UTC m=+11.723529934,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Feb 27 08:28:58 crc kubenswrapper[4906]: > Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.935506 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d26b8b6ba8d openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:33.329191565 +0000 UTC m=+11.723593175,LastTimestamp:2026-02-27 08:28:33.329191565 
+0000 UTC m=+11.723593175,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.941685 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event=< Feb 27 08:28:58 crc kubenswrapper[4906]: &Event{ObjectMeta:{kube-apiserver-crc.18980d27e6b6f5c4 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:ProbeError,Message:Startup probe error: HTTP probe failed with statuscode: 403 Feb 27 08:28:58 crc kubenswrapper[4906]: body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 27 08:28:58 crc kubenswrapper[4906]: Feb 27 08:28:58 crc kubenswrapper[4906]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:38.395925956 +0000 UTC m=+16.790327566,LastTimestamp:2026-02-27 08:28:38.395925956 +0000 UTC m=+16.790327566,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Feb 27 08:28:58 crc kubenswrapper[4906]: > Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.946988 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d27e6b7b5d7 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Unhealthy,Message:Startup probe failed: HTTP probe failed with statuscode: 403,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:38.395975127 +0000 UTC m=+16.790376737,LastTimestamp:2026-02-27 08:28:38.395975127 +0000 UTC m=+16.790376737,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.952499 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.18980d27e6b6f5c4\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event=< Feb 27 08:28:58 crc kubenswrapper[4906]: &Event{ObjectMeta:{kube-apiserver-crc.18980d27e6b6f5c4 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:ProbeError,Message:Startup probe error: HTTP probe failed with statuscode: 403 Feb 27 08:28:58 crc kubenswrapper[4906]: body: 
{"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 27 08:28:58 crc kubenswrapper[4906]: Feb 27 08:28:58 crc kubenswrapper[4906]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:38.395925956 +0000 UTC m=+16.790327566,LastTimestamp:2026-02-27 08:28:38.406214639 +0000 UTC m=+16.800616249,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Feb 27 08:28:58 crc kubenswrapper[4906]: > Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.954190 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.18980d27e6b7b5d7\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d27e6b7b5d7 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Unhealthy,Message:Startup probe failed: HTTP probe failed with statuscode: 403,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:38.395975127 +0000 UTC m=+16.790376737,LastTimestamp:2026-02-27 08:28:38.406262181 +0000 UTC m=+16.800663791,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.958309 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.18980d2525ed5a5e\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2525ed5a5e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.571553374 +0000 UTC m=+4.965954984,LastTimestamp:2026-02-27 08:28:38.674927912 +0000 UTC m=+17.069329522,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.963617 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.18980d253460f091\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d253460f091 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Created,Message:Created container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.814009489 +0000 UTC m=+5.208411109,LastTimestamp:2026-02-27 08:28:38.888103676 +0000 UTC m=+17.282505286,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.968561 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.18980d2537ed7a92\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.18980d2537ed7a92 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Started,Message:Started container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:26.873551506 +0000 UTC m=+5.267953126,LastTimestamp:2026-02-27 08:28:38.899072936 +0000 UTC m=+17.293474546,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.973977 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Feb 27 08:28:58 crc kubenswrapper[4906]: &Event{ObjectMeta:{kube-controller-manager-crc.18980d290ce088c1 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Feb 27 08:28:58 crc kubenswrapper[4906]: body: Feb 27 08:28:58 crc kubenswrapper[4906]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:43.331152065 +0000 UTC m=+21.725553695,LastTimestamp:2026-02-27 08:28:43.331152065 +0000 UTC m=+21.725553695,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Feb 27 08:28:58 crc kubenswrapper[4906]: > Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.978091 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d290ce1bee1 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:43.331231457 +0000 UTC m=+21.725633077,LastTimestamp:2026-02-27 08:28:43.331231457 +0000 UTC m=+21.725633077,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.983440 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.18980d290ce088c1\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Feb 27 08:28:58 crc kubenswrapper[4906]: &Event{ObjectMeta:{kube-controller-manager-crc.18980d290ce088c1 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Feb 27 08:28:58 crc kubenswrapper[4906]: body: Feb 27 08:28:58 crc kubenswrapper[4906]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:43.331152065 +0000 UTC m=+21.725553695,LastTimestamp:2026-02-27 08:28:53.329399714 +0000 UTC m=+31.723801364,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Feb 27 08:28:58 crc kubenswrapper[4906]: > Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.989460 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.18980d290ce1bee1\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d290ce1bee1 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:43.331231457 +0000 UTC m=+21.725633077,LastTimestamp:2026-02-27 08:28:53.329520597 +0000 UTC m=+31.723922247,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.994821 4906 event.go:359] 
"Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d2b610854f4 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Killing,Message:Container cluster-policy-controller failed startup probe, will be restarted,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:53.33298098 +0000 UTC m=+31.727382600,LastTimestamp:2026-02-27 08:28:53.33298098 +0000 UTC m=+31.727382600,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:58 crc kubenswrapper[4906]: E0227 08:28:58.999036 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.18980d24b5c7c9bc\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24b5c7c9bc openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:24.690043324 +0000 UTC m=+3.084444934,LastTimestamp:2026-02-27 08:28:53.480519049 +0000 UTC m=+31.874920669,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:59 crc kubenswrapper[4906]: E0227 08:28:59.003025 4906 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.18980d24c885f447\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24c885f447 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Created,Message:Created container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.004495943 +0000 UTC m=+3.398897583,LastTimestamp:2026-02-27 08:28:53.83712838 +0000 UTC m=+32.231529990,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:59 crc kubenswrapper[4906]: E0227 08:28:59.006788 4906 event.go:359] "Server rejected event (will not retry!)" err="events 
\"kube-controller-manager-crc.18980d24c97632c5\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d24c97632c5 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Started,Message:Started container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:28:25.020240581 +0000 UTC m=+3.414642191,LastTimestamp:2026-02-27 08:28:53.858845229 +0000 UTC m=+32.253246839,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:28:59 crc kubenswrapper[4906]: I0227 08:28:59.432491 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:00 crc kubenswrapper[4906]: W0227 08:29:00.191709 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:anonymous" cannot list resource "services" in API group "" at the cluster scope Feb 27 08:29:00 crc kubenswrapper[4906]: E0227 08:29:00.191774 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:anonymous\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError" Feb 27 08:29:00 crc kubenswrapper[4906]: I0227 08:29:00.329774 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:29:00 crc kubenswrapper[4906]: I0227 08:29:00.330063 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:00 crc kubenswrapper[4906]: I0227 08:29:00.333833 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:00 crc kubenswrapper[4906]: I0227 08:29:00.333924 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:00 crc kubenswrapper[4906]: I0227 08:29:00.333941 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:00 crc kubenswrapper[4906]: I0227 08:29:00.428558 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.427786 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.551389 4906 kubelet_node_status.go:401] "Setting node annotation to enable 
volume controller attach/detach" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.552959 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.553005 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.553018 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.553723 4906 scope.go:117] "RemoveContainer" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.941219 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.941586 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.943252 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.943294 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:01 crc kubenswrapper[4906]: I0227 08:29:01.943307 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:02 crc kubenswrapper[4906]: I0227 08:29:02.428762 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:02 crc kubenswrapper[4906]: I0227 08:29:02.774533 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 27 08:29:02 crc kubenswrapper[4906]: I0227 08:29:02.776851 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058"} Feb 27 08:29:02 crc kubenswrapper[4906]: I0227 08:29:02.777219 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:02 crc kubenswrapper[4906]: I0227 08:29:02.778533 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:02 crc kubenswrapper[4906]: I0227 08:29:02.778617 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:02 crc kubenswrapper[4906]: I0227 08:29:02.778643 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:02 crc kubenswrapper[4906]: E0227 08:29:02.821569 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.237447 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 27 08:29:03 crc kubenswrapper[4906]: 
I0227 08:29:03.256077 4906 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.329806 4906 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded" start-of-body= Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.329985 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded" Feb 27 08:29:03 crc kubenswrapper[4906]: E0227 08:29:03.335670 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Feb 27 08:29:03 crc kubenswrapper[4906]: &Event{ObjectMeta:{kube-controller-manager-crc.18980d2db4e5c6f1 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": context deadline exceeded Feb 27 08:29:03 crc kubenswrapper[4906]: body: Feb 27 08:29:03 crc kubenswrapper[4906]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:29:03.329937137 +0000 UTC m=+41.724338797,LastTimestamp:2026-02-27 08:29:03.329937137 +0000 UTC m=+41.724338797,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Feb 27 08:29:03 crc kubenswrapper[4906]: > Feb 27 08:29:03 crc kubenswrapper[4906]: E0227 08:29:03.342361 4906 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.18980d2db4e74d62 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:29:03.33003709 +0000 UTC m=+41.724438740,LastTimestamp:2026-02-27 08:29:03.33003709 +0000 UTC m=+41.724438740,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.427560 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource 
"csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.783117 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.783791 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.787309 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058" exitCode=255 Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.787365 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058"} Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.787464 4906 scope.go:117] "RemoveContainer" containerID="32e2234430d6a1b634993bea89301366a3eaebd9aeaab10c61294b75fa6b9e8e" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.787684 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.790014 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.790044 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.790059 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:03 crc kubenswrapper[4906]: I0227 08:29:03.790648 4906 scope.go:117] "RemoveContainer" containerID="46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058" Feb 27 08:29:03 crc kubenswrapper[4906]: E0227 08:29:03.790826 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:04 crc kubenswrapper[4906]: I0227 08:29:04.427941 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:04 crc kubenswrapper[4906]: I0227 08:29:04.794437 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Feb 27 08:29:05 crc kubenswrapper[4906]: W0227 08:29:05.178191 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes "crc" is forbidden: User "system:anonymous" cannot list resource "nodes" in API group "" at the cluster scope Feb 27 08:29:05 crc kubenswrapper[4906]: E0227 08:29:05.178253 4906 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes \"crc\" is forbidden: User \"system:anonymous\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError" Feb 27 08:29:05 crc kubenswrapper[4906]: I0227 08:29:05.427189 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:05 crc kubenswrapper[4906]: I0227 08:29:05.823139 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:05 crc kubenswrapper[4906]: E0227 08:29:05.824327 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Feb 27 08:29:05 crc kubenswrapper[4906]: I0227 08:29:05.824867 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:05 crc kubenswrapper[4906]: I0227 08:29:05.824948 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:05 crc kubenswrapper[4906]: I0227 08:29:05.824963 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:05 crc kubenswrapper[4906]: I0227 08:29:05.825000 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:29:05 crc kubenswrapper[4906]: E0227 08:29:05.831804 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Feb 27 08:29:06 crc kubenswrapper[4906]: I0227 08:29:06.427620 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:07 crc kubenswrapper[4906]: I0227 08:29:07.431054 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:08 crc kubenswrapper[4906]: I0227 08:29:08.428674 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:08 crc kubenswrapper[4906]: I0227 08:29:08.673271 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:29:08 crc kubenswrapper[4906]: I0227 08:29:08.673545 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:08 crc kubenswrapper[4906]: I0227 08:29:08.674951 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:08 crc kubenswrapper[4906]: I0227 08:29:08.674998 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:29:08 crc kubenswrapper[4906]: I0227 08:29:08.675014 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:08 crc kubenswrapper[4906]: I0227 08:29:08.675697 4906 scope.go:117] "RemoveContainer" containerID="46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058" Feb 27 08:29:08 crc kubenswrapper[4906]: E0227 08:29:08.675957 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:09 crc kubenswrapper[4906]: I0227 08:29:09.426439 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.340267 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.340437 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.341681 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.341741 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.341754 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.344257 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.429227 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:10 crc kubenswrapper[4906]: W0227 08:29:10.511655 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: runtimeclasses.node.k8s.io is forbidden: User "system:anonymous" cannot list resource "runtimeclasses" in API group "node.k8s.io" at the cluster scope Feb 27 08:29:10 crc kubenswrapper[4906]: E0227 08:29:10.511709 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: runtimeclasses.node.k8s.io is forbidden: User \"system:anonymous\" cannot list resource \"runtimeclasses\" in API group \"node.k8s.io\" at the cluster scope" logger="UnhandledError" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.812555 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.813744 4906 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.813837 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:10 crc kubenswrapper[4906]: I0227 08:29:10.813922 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:11 crc kubenswrapper[4906]: I0227 08:29:11.429719 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:12 crc kubenswrapper[4906]: I0227 08:29:12.427060 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:12 crc kubenswrapper[4906]: E0227 08:29:12.822582 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:29:12 crc kubenswrapper[4906]: E0227 08:29:12.828713 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Feb 27 08:29:12 crc kubenswrapper[4906]: I0227 08:29:12.832015 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:12 crc kubenswrapper[4906]: I0227 08:29:12.833000 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:12 crc kubenswrapper[4906]: I0227 08:29:12.833046 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:12 crc kubenswrapper[4906]: I0227 08:29:12.833060 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:12 crc kubenswrapper[4906]: I0227 08:29:12.833086 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:29:12 crc kubenswrapper[4906]: E0227 08:29:12.838120 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Feb 27 08:29:13 crc kubenswrapper[4906]: I0227 08:29:13.417110 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:29:13 crc kubenswrapper[4906]: I0227 08:29:13.418264 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:13 crc kubenswrapper[4906]: I0227 08:29:13.419953 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:13 crc kubenswrapper[4906]: I0227 08:29:13.420059 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:13 crc kubenswrapper[4906]: I0227 08:29:13.420463 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:13 crc kubenswrapper[4906]: I0227 08:29:13.421274 4906 
scope.go:117] "RemoveContainer" containerID="46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058" Feb 27 08:29:13 crc kubenswrapper[4906]: E0227 08:29:13.421471 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:13 crc kubenswrapper[4906]: I0227 08:29:13.426121 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:14 crc kubenswrapper[4906]: I0227 08:29:14.427181 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:15 crc kubenswrapper[4906]: I0227 08:29:15.429737 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:15 crc kubenswrapper[4906]: W0227 08:29:15.716474 4906 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:anonymous" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:15 crc kubenswrapper[4906]: E0227 08:29:15.716544 4906 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:anonymous\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" Feb 27 08:29:16 crc kubenswrapper[4906]: I0227 08:29:16.430104 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:16 crc kubenswrapper[4906]: I0227 08:29:16.841984 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 27 08:29:16 crc kubenswrapper[4906]: I0227 08:29:16.842210 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:16 crc kubenswrapper[4906]: I0227 08:29:16.843854 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:16 crc kubenswrapper[4906]: I0227 08:29:16.843942 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:16 crc kubenswrapper[4906]: I0227 08:29:16.843954 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:17 crc kubenswrapper[4906]: I0227 08:29:17.430525 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is 
forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:18 crc kubenswrapper[4906]: I0227 08:29:18.428069 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:19 crc kubenswrapper[4906]: I0227 08:29:19.430332 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:19 crc kubenswrapper[4906]: E0227 08:29:19.837106 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Feb 27 08:29:19 crc kubenswrapper[4906]: I0227 08:29:19.839098 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:19 crc kubenswrapper[4906]: I0227 08:29:19.840264 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:19 crc kubenswrapper[4906]: I0227 08:29:19.840314 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:19 crc kubenswrapper[4906]: I0227 08:29:19.840328 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:19 crc kubenswrapper[4906]: I0227 08:29:19.840359 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:29:19 crc kubenswrapper[4906]: E0227 08:29:19.845988 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Feb 27 08:29:20 crc kubenswrapper[4906]: I0227 08:29:20.430069 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:21 crc kubenswrapper[4906]: I0227 08:29:21.428967 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:22 crc kubenswrapper[4906]: I0227 08:29:22.430635 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:22 crc kubenswrapper[4906]: E0227 08:29:22.823008 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:29:23 crc kubenswrapper[4906]: I0227 08:29:23.427670 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 
27 08:29:24 crc kubenswrapper[4906]: I0227 08:29:24.427386 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:25 crc kubenswrapper[4906]: I0227 08:29:25.428391 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.430341 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.552265 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.553694 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.553748 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.553761 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.554430 4906 scope.go:117] "RemoveContainer" containerID="46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058" Feb 27 08:29:26 crc kubenswrapper[4906]: E0227 08:29:26.842183 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.846295 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.851678 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.851720 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.851730 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:26 crc kubenswrapper[4906]: I0227 08:29:26.851766 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:29:26 crc kubenswrapper[4906]: E0227 08:29:26.859280 4906 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.428173 4906 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.857774 
4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.858244 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.860589 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" exitCode=255 Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.860656 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5"} Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.860764 4906 scope.go:117] "RemoveContainer" containerID="46e8d2497f8ad2b97401ae68fffd7dffbfede41cd21848d8abdc844cfb503058" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.860987 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.862557 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.862590 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.862602 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:27 crc kubenswrapper[4906]: I0227 08:29:27.863270 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:29:27 crc kubenswrapper[4906]: E0227 08:29:27.863459 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.138632 4906 csr.go:261] certificate signing request csr-zzmcd is approved, waiting to be issued Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.159965 4906 csr.go:257] certificate signing request csr-zzmcd is issued Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.263605 4906 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.673544 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.865619 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.867608 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 
08:29:28.868530 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.868586 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.868601 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:28 crc kubenswrapper[4906]: I0227 08:29:28.869354 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:29:28 crc kubenswrapper[4906]: E0227 08:29:28.869559 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:29 crc kubenswrapper[4906]: I0227 08:29:29.104207 4906 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Feb 27 08:29:29 crc kubenswrapper[4906]: I0227 08:29:29.161926 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-11-13 19:57:10.731174366 +0000 UTC Feb 27 08:29:29 crc kubenswrapper[4906]: I0227 08:29:29.161980 4906 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6227h27m41.569197776s for next certificate rotation Feb 27 08:29:32 crc kubenswrapper[4906]: E0227 08:29:32.823823 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.416645 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.416795 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.417922 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.417960 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.417972 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.418511 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.418762 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.860325 4906 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.861609 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.861674 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.861698 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.861943 4906 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.869947 4906 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.870236 4906 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.870270 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": node \"crc\" not found" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.874409 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.874447 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.874475 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.874490 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.874499 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:33Z","lastTransitionTime":"2026-02-27T08:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.898923 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.912536 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.912586 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.912598 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.912617 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.912627 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:33Z","lastTransitionTime":"2026-02-27T08:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.924617 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.933430 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.933483 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.933494 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.933514 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.933525 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:33Z","lastTransitionTime":"2026-02-27T08:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.944636 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.954036 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.954066 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.954078 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.954104 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:33 crc kubenswrapper[4906]: I0227 08:29:33.954119 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:33Z","lastTransitionTime":"2026-02-27T08:29:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.967613 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:33Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.967722 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:29:33 crc kubenswrapper[4906]: E0227 08:29:33.967748 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.068454 4906 
kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.169397 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.270388 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.370491 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.471376 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.571975 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.672873 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.773664 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.874656 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:34 crc kubenswrapper[4906]: E0227 08:29:34.975651 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.076588 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.177148 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.278162 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.378840 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.480060 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.581507 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.681680 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.782626 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.883624 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:35 crc kubenswrapper[4906]: E0227 08:29:35.983737 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.084842 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 
08:29:36.185295 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.286345 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.387294 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.487498 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.587847 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.688572 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.789092 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.889701 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:36 crc kubenswrapper[4906]: E0227 08:29:36.990936 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.091682 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.192488 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.293619 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.394423 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.495486 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: I0227 08:29:37.520474 4906 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.596157 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.696659 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.797450 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.897902 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:37 crc kubenswrapper[4906]: E0227 08:29:37.998680 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.099681 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc 
kubenswrapper[4906]: E0227 08:29:38.200860 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.301905 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.402283 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.503000 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.604193 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.705194 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.805992 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:38 crc kubenswrapper[4906]: E0227 08:29:38.906453 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.007567 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.108565 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.209793 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.310663 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.410953 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.511612 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.612740 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.713144 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.814225 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:39 crc kubenswrapper[4906]: E0227 08:29:39.914705 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.015993 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.116763 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.217516 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 
27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.317725 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.418847 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.520082 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.620633 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.721081 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.821542 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:40 crc kubenswrapper[4906]: E0227 08:29:40.923040 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.024027 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.125133 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.225905 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.326626 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.427033 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.527296 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.628236 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.728932 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.829272 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:41 crc kubenswrapper[4906]: E0227 08:29:41.930153 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.030556 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.130916 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.231117 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.331929 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" 
not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.432107 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.532755 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.633854 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.735171 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.825039 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.835364 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:42 crc kubenswrapper[4906]: E0227 08:29:42.935867 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.037078 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.138002 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.238349 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.338755 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.439417 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.540028 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: I0227 08:29:43.551480 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:43 crc kubenswrapper[4906]: I0227 08:29:43.552965 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:43 crc kubenswrapper[4906]: I0227 08:29:43.553028 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:43 crc kubenswrapper[4906]: I0227 08:29:43.553051 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.640985 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.741693 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.842092 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:43 crc kubenswrapper[4906]: E0227 08:29:43.943006 4906 kubelet_node_status.go:503] "Error getting the 
current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.043489 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.144369 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.189412 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.195979 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.196396 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.196506 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.196620 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.196699 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:44Z","lastTransitionTime":"2026-02-27T08:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.213043 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 
08:29:44.218733 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.218791 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.218809 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.218836 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.218857 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:44Z","lastTransitionTime":"2026-02-27T08:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.231334 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.236589 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.236623 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.236638 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.236657 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.236683 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:44Z","lastTransitionTime":"2026-02-27T08:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.252768 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.257645 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.257714 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.257779 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.257816 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.257839 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:44Z","lastTransitionTime":"2026-02-27T08:29:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.269427 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.269723 4906 
kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.269825 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.371021 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: I0227 08:29:44.440173 4906 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.472542 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.573302 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.674218 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.775300 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.875465 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:44 crc kubenswrapper[4906]: E0227 08:29:44.975869 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.076653 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.176794 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.277844 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.378777 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.478955 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.579796 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.679981 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.780618 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.881394 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:45 crc kubenswrapper[4906]: E0227 08:29:45.982163 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.082970 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 
08:29:46.183523 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.284651 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.385219 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.486289 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: I0227 08:29:46.551344 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 27 08:29:46 crc kubenswrapper[4906]: I0227 08:29:46.552777 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:46 crc kubenswrapper[4906]: I0227 08:29:46.552848 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:46 crc kubenswrapper[4906]: I0227 08:29:46.552912 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:46 crc kubenswrapper[4906]: I0227 08:29:46.553873 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.554231 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.587081 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.687608 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.788816 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.889216 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:46 crc kubenswrapper[4906]: E0227 08:29:46.989656 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.090962 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.191135 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.292047 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.392568 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 27 08:29:47 crc kubenswrapper[4906]: I0227 08:29:47.412525 
4906 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.494146 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.594366 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.694990 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.796187 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.896329 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:47 crc kubenswrapper[4906]: E0227 08:29:47.997299 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.097862 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.198056 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.299235 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.400313 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.500957 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.602690 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.703169 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.803487 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:48 crc kubenswrapper[4906]: E0227 08:29:48.904544 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.006043 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.107083 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.207741 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.307938 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.408213 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.508794 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.609080 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.709692 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.810822 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:49 crc kubenswrapper[4906]: E0227 08:29:49.911680 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.012600 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.112980 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.213736 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.314279 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.414906 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.515791 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: I0227 08:29:50.551486 4906 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 27 08:29:50 crc kubenswrapper[4906]: I0227 08:29:50.553369 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 27 08:29:50 crc kubenswrapper[4906]: I0227 08:29:50.553422 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 27 08:29:50 crc kubenswrapper[4906]: I0227 08:29:50.553438 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.616091 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.716852 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.817957 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:50 crc kubenswrapper[4906]: E0227 08:29:50.918731 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.018975 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.119384 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.220347 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.321084 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.422271 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.523305 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.623440 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.723947 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.824100 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:51 crc kubenswrapper[4906]: E0227 08:29:51.924944 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.025247 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.126053 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.226464 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.327211 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.427891 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.528509 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.629661 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.730647 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.825437 4906 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.830737 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:52 crc kubenswrapper[4906]: E0227 08:29:52.931021 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.031812 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.132299 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.233430 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.334385 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.435393 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.535941 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.636047 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.736572 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.837568 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:53 crc kubenswrapper[4906]: E0227 08:29:53.938670 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.040099 4906 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.110365 4906 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.142709 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.142753 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.142767 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.142786 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.142798 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.245023 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.245066 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.245078 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.245093 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.245104 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.347263 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.347294 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.347302 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.347314 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.347323 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.352440 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.352509 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.352528 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.352554 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.352569 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.367012 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.370769 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.370817 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.370829 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.370846 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.370858 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.394729 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.399140 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.399182 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.399195 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.399210 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.399221 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.412964 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.417701 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.417737 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.417746 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.417762 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.417771 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.431724 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.435519 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.435547 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.435555 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.435570 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.435579 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.446790 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.446933 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.449105 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.449143 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.449157 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.449174 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.449187 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.449645 4906 apiserver.go:52] "Watching apiserver" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.458285 4906 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.458605 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-machine-config-operator/machine-config-daemon-2s5wg","openshift-multus/network-metrics-daemon-6rvgh","openshift-network-diagnostics/network-check-target-xd92c","openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd","openshift-dns/node-resolver-24rf6","openshift-multus/multus-additional-cni-plugins-9cqzh","openshift-ovn-kubernetes/ovnkube-node-lck5x","openshift-multus/multus-6nxxh","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-image-registry/node-ca-wxkxk","openshift-network-operator/network-operator-58b4c7f79c-55gtf"] Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.458945 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.458987 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.459073 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459001 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.459392 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459455 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459486 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459503 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459547 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459547 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459582 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.459615 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.459625 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.460698 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.460858 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.461320 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.461362 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.461414 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.461585 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.465017 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.465191 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.465251 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.465483 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.465555 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.465736 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.465840 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466041 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466152 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466215 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466281 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466318 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466380 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466418 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.466664 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.472758 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.472812 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.472845 4906 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-dns"/"openshift-service-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.472948 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473000 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.472845 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473093 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473125 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473134 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473226 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473386 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473424 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473498 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473590 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473602 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473639 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473658 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473656 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.473825 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.474066 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.474353 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.490239 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.500263 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.511427 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.523604 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.532357 4906 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.537974 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.550330 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.550993 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.551023 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.551035 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.551052 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.551062 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.561107 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.574540 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.585724 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.601693 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.610581 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.618920 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621242 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621289 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621316 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621342 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621364 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621384 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod 
\"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621408 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621429 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621450 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621471 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621496 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621514 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621529 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621549 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621572 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621594 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621614 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621631 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621646 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621660 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621678 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621695 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621711 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621725 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621740 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621757 4906 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621775 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621789 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621806 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621821 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621835 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621850 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621866 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621908 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621935 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621959 4906 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621984 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622008 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622030 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622044 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622060 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622074 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622088 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622103 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622117 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: 
\"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622133 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622148 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622191 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622209 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622230 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622254 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622277 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622294 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622311 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622332 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") 
pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622354 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622372 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622387 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622403 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622424 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622457 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622479 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622499 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622522 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622546 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622566 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622589 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622613 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622640 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622662 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622682 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622703 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622728 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622755 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622776 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622797 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622819 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622839 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622860 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622900 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622948 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622965 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622982 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622997 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623012 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623028 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623044 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623060 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623074 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623089 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623106 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623123 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623139 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623157 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623173 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623189 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623210 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623233 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623264 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623285 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623301 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623316 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623333 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623351 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 
08:29:54.623366 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623382 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623396 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623411 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623428 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623444 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623460 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623475 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623489 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623506 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623524 4906 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623542 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623559 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623574 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623590 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623607 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623622 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623647 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623663 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623678 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623694 4906 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623709 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623726 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623741 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623756 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623773 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623791 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623807 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623823 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623840 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 
08:29:54.623856 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623872 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623905 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623921 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623938 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623955 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623971 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623988 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624005 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624021 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 27 08:29:54 crc 
kubenswrapper[4906]: I0227 08:29:54.624039 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624056 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624075 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624091 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624107 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624124 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624140 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624156 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624172 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624189 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod 
\"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624206 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624221 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624238 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624256 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624272 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624287 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624305 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624329 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624354 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624378 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod 
\"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624397 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624415 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624436 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624457 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624481 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624504 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624595 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624626 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624644 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624662 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624678 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624695 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624712 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624731 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624751 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624768 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624784 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624800 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624816 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624836 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: 
\"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624853 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624871 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624920 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624939 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624954 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624971 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624990 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625007 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625024 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625042 4906 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625061 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625116 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpmgx\" (UniqueName: \"kubernetes.io/projected/8a232e3c-1fa4-4163-bb31-bd2f9891f259-kube-api-access-tpmgx\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625137 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-socket-dir-parent\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625155 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-netns\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625170 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-netd\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625185 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-ovn\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625201 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-log-socket\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625224 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625241 4906 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/aac6a472-a779-4ece-a897-c062a410c555-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625257 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-multus-certs\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.621523 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622221 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622364 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622493 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622480 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622626 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622685 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622747 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622870 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.622914 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625401 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623286 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623384 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623560 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623684 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623751 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.623988 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624173 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624356 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624304 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624569 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624574 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624917 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.624966 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625052 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625206 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625217 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625243 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625615 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625762 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625816 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625948 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626076 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626254 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626271 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626289 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626506 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626626 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626690 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626862 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.626975 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.627070 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.627117 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.627237 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.627771 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.627819 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.627814 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628065 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628078 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628201 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628478 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628593 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628707 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628725 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628754 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628836 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628908 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.628980 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629002 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629229 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629389 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629496 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.629511 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:29:55.129487139 +0000 UTC m=+93.523888789 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629658 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629694 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629941 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.629955 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.630016 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.630119 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.630506 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.630702 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.630728 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.630996 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.631408 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.631444 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.631661 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.631844 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.631948 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.631277 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.632080 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.632466 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.632567 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.632712 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.632855 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\"
,\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.632980 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.632991 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.633043 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.633184 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.633656 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.635169 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.635179 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.633831 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.634337 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.634408 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.634705 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.634751 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.634939 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.635674 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.635827 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.636283 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.636342 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.636534 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.636748 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.637047 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.637539 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.637565 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.637546 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.637955 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638140 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638160 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638229 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638249 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638254 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638290 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638295 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.638363 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.639416 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.639514 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.639685 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.639773 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.639903 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.639969 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.639963 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640150 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640279 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640359 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640383 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640416 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640442 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640678 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.640948 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641008 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641048 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641085 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641141 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641356 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641390 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641586 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.625272 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641896 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-systemd-units\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641956 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641987 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-cni-bin\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642007 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/aac6a472-a779-4ece-a897-c062a410c555-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642029 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-hostroot\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643019 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-etc-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643309 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-proxy-tls\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643371 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643156 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643412 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f741b698-d9d4-4e22-800a-91e67ca6e260-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643449 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643482 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-cni-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643512 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-config\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643547 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-mcd-auth-proxy-config\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643572 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-system-cni-dir\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " 
pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643607 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643639 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-k8s-cni-cncf-io\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643670 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-var-lib-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643700 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8a232e3c-1fa4-4163-bb31-bd2f9891f259-serviceca\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643728 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a961de01-e505-4c80-96a0-333da958a633-multus-daemon-config\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643758 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643797 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l5jtf\" (UniqueName: \"kubernetes.io/projected/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-kube-api-access-l5jtf\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643927 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-cnibin\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643968 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" 
(UniqueName: \"kubernetes.io/configmap/f741b698-d9d4-4e22-800a-91e67ca6e260-cni-binary-copy\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643995 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8a232e3c-1fa4-4163-bb31-bd2f9891f259-host\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644032 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644067 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfpcq\" (UniqueName: \"kubernetes.io/projected/aac6a472-a779-4ece-a897-c062a410c555-kube-api-access-hfpcq\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644105 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641822 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641966 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641944 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.641984 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642011 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642063 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642308 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642375 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642597 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642465 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642640 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642747 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.642910 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643139 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643254 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643261 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643342 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643605 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643630 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.643741 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644020 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644248 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-rootfs\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644309 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4b021f0d-0615-479d-ab6c-6736222572f1-hosts-file\") pod \"node-resolver-24rf6\" (UID: \"4b021f0d-0615-479d-ab6c-6736222572f1\") " pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644340 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-kubelet\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644366 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644388 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-ovn-kubernetes\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644412 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2fwk\" (UniqueName: \"kubernetes.io/projected/a961de01-e505-4c80-96a0-333da958a633-kube-api-access-f2fwk\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644432 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644494 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-kubelet\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644515 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644537 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-cnibin\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644556 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-cni-multus\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644575 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-slash\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644618 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644638 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-os-release\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644658 4906 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644680 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-bin\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.644496 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644751 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644775 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slkv2\" (UniqueName: \"kubernetes.io/projected/f741b698-d9d4-4e22-800a-91e67ca6e260-kube-api-access-slkv2\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644784 4906 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. 
Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644798 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a961de01-e505-4c80-96a0-333da958a633-cni-binary-copy\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644815 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-etc-kubernetes\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644835 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovn-node-metrics-cert\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644853 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-script-lib\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644889 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66chc\" (UniqueName: \"kubernetes.io/projected/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-kube-api-access-66chc\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644907 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/aac6a472-a779-4ece-a897-c062a410c555-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644926 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-systemd\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644946 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fgsd\" (UniqueName: \"kubernetes.io/projected/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-kube-api-access-7fgsd\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.644997 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltkxm\" (UniqueName: \"kubernetes.io/projected/4b021f0d-0615-479d-ab6c-6736222572f1-kube-api-access-ltkxm\") pod \"node-resolver-24rf6\" (UID: \"4b021f0d-0615-479d-ab6c-6736222572f1\") " pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645018 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-os-release\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645038 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645062 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645082 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645110 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-system-cni-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645129 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-netns\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645146 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-conf-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645172 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-node-log\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 
08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645199 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-env-overrides\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645273 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.645377 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.645632 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:55.145610964 +0000 UTC m=+93.540012574 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.645927 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.645973 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:55.145963034 +0000 UTC m=+93.540364644 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646061 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646390 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646455 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646472 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646488 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646498 4906 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646509 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646522 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646533 4906 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646766 4906 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646791 4906 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646808 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646820 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646831 4906 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646841 4906 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646855 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646864 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646900 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646915 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646948 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646967 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.646988 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647002 4906 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647016 4906 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647026 4906 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647041 4906 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647051 4906 reconciler_common.go:293] "Volume detached for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647060 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647070 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647084 4906 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647094 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647104 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647117 4906 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647126 4906 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647136 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647147 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647158 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647168 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647177 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647186 4906 
reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647198 4906 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647207 4906 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647215 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647224 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647238 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647248 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647258 4906 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647270 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647280 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647288 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647297 4906 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647309 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647317 4906 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647326 4906 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647335 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647351 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647362 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647373 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647386 4906 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647397 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647407 4906 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647418 4906 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647431 4906 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647442 4906 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647454 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: 
I0227 08:29:54.647466 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647483 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647497 4906 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647509 4906 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647522 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647539 4906 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647551 4906 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647562 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647578 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647592 4906 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647605 4906 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647615 4906 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647631 4906 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 
08:29:54.647643 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647656 4906 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647669 4906 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647683 4906 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647694 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647706 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647717 4906 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647731 4906 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647743 4906 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647754 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647768 4906 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647782 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647793 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 
08:29:54.647806 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647821 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647833 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647844 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647856 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647869 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647940 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647956 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647973 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647985 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647998 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648009 4906 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648025 4906 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc 
kubenswrapper[4906]: I0227 08:29:54.648035 4906 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.647932 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648049 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648253 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648275 4906 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648294 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648318 4906 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648335 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648348 4906 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648368 4906 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648382 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648497 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648518 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648590 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648605 4906 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648619 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648658 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648677 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648693 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648707 4906 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648720 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648741 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648755 4906 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648768 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648786 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648800 4906 reconciler_common.go:293] "Volume detached for volume 
\"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648814 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648829 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648848 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648863 4906 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648912 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648926 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648940 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648951 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.648964 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.649374 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.651696 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.652578 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.654282 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.654499 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.654660 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.657637 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.659142 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.661129 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.661198 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.661214 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.661314 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:55.161255386 +0000 UTC m=+93.555657066 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.661647 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.662319 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.662343 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.662360 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.662416 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:55.162397848 +0000 UTC m=+93.556799538 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.662426 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.662468 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.662481 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.662500 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.662512 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.663209 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.663698 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.663867 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.664763 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.667158 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.667304 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.667697 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.667627 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.669765 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.670374 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.671735 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.672084 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.673236 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.673408 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.673518 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.673528 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.673783 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.673852 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.674054 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.674218 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.674632 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.674938 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.675204 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.675538 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.677602 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.677663 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.679568 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.682540 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.684852 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.687805 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.688259 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.699974 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.701734 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749605 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-cni-bin\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749645 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-etc-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749663 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-proxy-tls\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749682 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/aac6a472-a779-4ece-a897-c062a410c555-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749691 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-cni-bin\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749721 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-hostroot\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749699 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-hostroot\") pod \"multus-6nxxh\" (UID: 
\"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749804 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-cni-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749806 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-etc-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.749954 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-cni-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750002 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-config\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750046 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-mcd-auth-proxy-config\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750080 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-system-cni-dir\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750111 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f741b698-d9d4-4e22-800a-91e67ca6e260-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750144 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-var-lib-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750176 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-k8s-cni-cncf-io\") pod \"multus-6nxxh\" (UID: 
\"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750206 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a961de01-e505-4c80-96a0-333da958a633-multus-daemon-config\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750273 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750304 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l5jtf\" (UniqueName: \"kubernetes.io/projected/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-kube-api-access-l5jtf\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750336 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-cnibin\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750368 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8a232e3c-1fa4-4163-bb31-bd2f9891f259-serviceca\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750401 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfpcq\" (UniqueName: \"kubernetes.io/projected/aac6a472-a779-4ece-a897-c062a410c555-kube-api-access-hfpcq\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750448 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-rootfs\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750479 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4b021f0d-0615-479d-ab6c-6736222572f1-hosts-file\") pod \"node-resolver-24rf6\" (UID: \"4b021f0d-0615-479d-ab6c-6736222572f1\") " pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750509 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f741b698-d9d4-4e22-800a-91e67ca6e260-cni-binary-copy\") pod \"multus-additional-cni-plugins-9cqzh\" 
(UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750540 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8a232e3c-1fa4-4163-bb31-bd2f9891f259-host\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750588 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-ovn-kubernetes\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750621 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-kubelet\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750654 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2fwk\" (UniqueName: \"kubernetes.io/projected/a961de01-e505-4c80-96a0-333da958a633-kube-api-access-f2fwk\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750685 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-kubelet\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750715 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750752 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-cni-multus\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.750972 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-mcd-auth-proxy-config\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751029 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-lck5x\" (UID: 
\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751066 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-cnibin\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751121 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-ovn-kubernetes\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751134 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-config\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751167 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-rootfs\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751218 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/4b021f0d-0615-479d-ab6c-6736222572f1-hosts-file\") pod \"node-resolver-24rf6\" (UID: \"4b021f0d-0615-479d-ab6c-6736222572f1\") " pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751345 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-system-cni-dir\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751345 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8a232e3c-1fa4-4163-bb31-bd2f9891f259-host\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751384 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-k8s-cni-cncf-io\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751396 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-var-lib-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 
08:29:54.751440 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-slash\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751455 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-kubelet\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751469 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-kubelet\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751515 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-openvswitch\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751555 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-slash\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751558 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-cnibin\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751570 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-var-lib-cni-multus\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751617 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-cnibin\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751665 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-bin\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751706 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-bin\") pod \"ovnkube-node-lck5x\" (UID: 
\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751728 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751759 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-os-release\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751788 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-etc-kubernetes\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751811 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovn-node-metrics-cert\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751831 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-script-lib\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751845 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-etc-kubernetes\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751869 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-os-release\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751901 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66chc\" (UniqueName: \"kubernetes.io/projected/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-kube-api-access-66chc\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751928 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 
27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751950 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slkv2\" (UniqueName: \"kubernetes.io/projected/f741b698-d9d4-4e22-800a-91e67ca6e260-kube-api-access-slkv2\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.751999 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a961de01-e505-4c80-96a0-333da958a633-cni-binary-copy\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752030 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fgsd\" (UniqueName: \"kubernetes.io/projected/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-kube-api-access-7fgsd\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752063 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltkxm\" (UniqueName: \"kubernetes.io/projected/4b021f0d-0615-479d-ab6c-6736222572f1-kube-api-access-ltkxm\") pod \"node-resolver-24rf6\" (UID: \"4b021f0d-0615-479d-ab6c-6736222572f1\") " pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752091 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-os-release\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752120 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/aac6a472-a779-4ece-a897-c062a410c555-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752149 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-systemd\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752178 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752270 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " 
pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752309 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-netns\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-conf-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752367 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-node-log\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752394 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-env-overrides\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752423 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-system-cni-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752452 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-socket-dir-parent\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752480 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-netns\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752530 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-netd\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752560 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tpmgx\" (UniqueName: \"kubernetes.io/projected/8a232e3c-1fa4-4163-bb31-bd2f9891f259-kube-api-access-tpmgx\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752592 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/aac6a472-a779-4ece-a897-c062a410c555-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752608 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-conf-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752627 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-multus-certs\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752634 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/8a232e3c-1fa4-4163-bb31-bd2f9891f259-serviceca\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752655 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752664 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-os-release\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752687 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-systemd-units\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752718 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-ovn\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752746 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-log-socket\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752828 4906 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") 
on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752849 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752865 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752907 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752926 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752944 4906 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752961 4906 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752978 4906 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.752994 4906 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753012 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753027 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753044 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753061 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753077 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 
08:29:54.753095 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753111 4906 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753127 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753144 4906 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753161 4906 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753177 4906 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753193 4906 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753200 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-systemd\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753209 4906 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753226 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753242 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753258 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753274 4906 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node 
\"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753293 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753308 4906 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753324 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753339 4906 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753354 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753372 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753389 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753408 4906 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753426 4906 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753441 4906 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753456 4906 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.753436 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753509 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-node-log\") pod \"ovnkube-node-lck5x\" (UID: 
\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753178 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/aac6a472-a779-4ece-a897-c062a410c555-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: E0227 08:29:54.753560 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:55.25353951 +0000 UTC m=+93.647941140 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753649 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-netns\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753472 4906 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.753968 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f741b698-d9d4-4e22-800a-91e67ca6e260-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754035 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f741b698-d9d4-4e22-800a-91e67ca6e260-tuning-conf-dir\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754125 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-env-overrides\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754202 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-system-cni-dir\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754208 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovn-node-metrics-cert\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754263 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-netd\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754270 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-multus-socket-dir-parent\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754281 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-netns\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754305 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/a961de01-e505-4c80-96a0-333da958a633-host-run-multus-certs\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754309 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-ovn\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754283 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754294 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-systemd-units\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754324 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-log-socket\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754405 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754455 4906 
reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754470 4906 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754470 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f741b698-d9d4-4e22-800a-91e67ca6e260-cni-binary-copy\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754440 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/a961de01-e505-4c80-96a0-333da958a633-cni-binary-copy\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754504 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754563 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754577 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754619 4906 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754635 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754648 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754661 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754682 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/aac6a472-a779-4ece-a897-c062a410c555-env-overrides\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754702 4906 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754719 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754734 4906 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754746 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754758 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754800 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754813 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754825 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754837 4906 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754873 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.754943 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/a961de01-e505-4c80-96a0-333da958a633-multus-daemon-config\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.761100 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-script-lib\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.762282 
4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-proxy-tls\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.769654 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/aac6a472-a779-4ece-a897-c062a410c555-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.771691 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fgsd\" (UniqueName: \"kubernetes.io/projected/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-kube-api-access-7fgsd\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.772406 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.772422 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l5jtf\" (UniqueName: \"kubernetes.io/projected/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-kube-api-access-l5jtf\") pod \"ovnkube-node-lck5x\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.772445 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.772461 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.772479 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.772495 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.774340 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpmgx\" (UniqueName: \"kubernetes.io/projected/8a232e3c-1fa4-4163-bb31-bd2f9891f259-kube-api-access-tpmgx\") pod \"node-ca-wxkxk\" (UID: \"8a232e3c-1fa4-4163-bb31-bd2f9891f259\") " pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.774705 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltkxm\" (UniqueName: \"kubernetes.io/projected/4b021f0d-0615-479d-ab6c-6736222572f1-kube-api-access-ltkxm\") pod \"node-resolver-24rf6\" (UID: \"4b021f0d-0615-479d-ab6c-6736222572f1\") " pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.775712 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66chc\" (UniqueName: \"kubernetes.io/projected/fc2f1b1e-37c4-45c1-8f9c-221faf5b777d-kube-api-access-66chc\") pod \"machine-config-daemon-2s5wg\" (UID: \"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\") " pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.775783 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slkv2\" (UniqueName: \"kubernetes.io/projected/f741b698-d9d4-4e22-800a-91e67ca6e260-kube-api-access-slkv2\") pod \"multus-additional-cni-plugins-9cqzh\" (UID: \"f741b698-d9d4-4e22-800a-91e67ca6e260\") " pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.775923 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfpcq\" (UniqueName: \"kubernetes.io/projected/aac6a472-a779-4ece-a897-c062a410c555-kube-api-access-hfpcq\") pod \"ovnkube-control-plane-749d76644c-mjwsd\" (UID: \"aac6a472-a779-4ece-a897-c062a410c555\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.776044 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2fwk\" (UniqueName: \"kubernetes.io/projected/a961de01-e505-4c80-96a0-333da958a633-kube-api-access-f2fwk\") pod \"multus-6nxxh\" (UID: \"a961de01-e505-4c80-96a0-333da958a633\") " pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.782836 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.792626 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.799829 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.804565 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0338a8dd_b2b2_44b7_a7a2_4a2a5b7a23f2.slice/crio-5f72107f76721706132c879200ae447ff3d514374d272267af6fc87de4400532 WatchSource:0}: Error finding container 5f72107f76721706132c879200ae447ff3d514374d272267af6fc87de4400532: Status 404 returned error can't find the container with id 5f72107f76721706132c879200ae447ff3d514374d272267af6fc87de4400532 Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.809088 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.819590 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.824733 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf741b698_d9d4_4e22_800a_91e67ca6e260.slice/crio-52848ae2c10cbd2c035368a4e896064f5c895959779f11c5152aa09804f49076 WatchSource:0}: Error finding container 52848ae2c10cbd2c035368a4e896064f5c895959779f11c5152aa09804f49076: Status 404 returned error can't find the container with id 52848ae2c10cbd2c035368a4e896064f5c895959779f11c5152aa09804f49076 Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.827245 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.836057 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-6nxxh" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.843097 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.845429 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaac6a472_a779_4ece_a897_c062a410c555.slice/crio-a6f832235a4334470aee68b2e963e4aa9788da1943c90ea5191d33c486e683e4 WatchSource:0}: Error finding container a6f832235a4334470aee68b2e963e4aa9788da1943c90ea5191d33c486e683e4: Status 404 returned error can't find the container with id a6f832235a4334470aee68b2e963e4aa9788da1943c90ea5191d33c486e683e4 Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.848213 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-c2a4bc7f3ec64a6a0264cc72782dd5abc91ffbebbb12bd4c4f5a57819f108d65 WatchSource:0}: Error finding container c2a4bc7f3ec64a6a0264cc72782dd5abc91ffbebbb12bd4c4f5a57819f108d65: Status 404 returned error can't find the container with id c2a4bc7f3ec64a6a0264cc72782dd5abc91ffbebbb12bd4c4f5a57819f108d65 Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.852601 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-24rf6" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.859916 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-wxkxk" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.875325 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.875375 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.875395 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.875423 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.875440 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.884206 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-b28a34c86aaca99652316420d2f0eefed915fd8c578fe6fa642712f094cfa58b WatchSource:0}: Error finding container b28a34c86aaca99652316420d2f0eefed915fd8c578fe6fa642712f094cfa58b: Status 404 returned error can't find the container with id b28a34c86aaca99652316420d2f0eefed915fd8c578fe6fa642712f094cfa58b Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.899979 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda961de01_e505_4c80_96a0_333da958a633.slice/crio-428bee91fb2c029282b774ad8d35e6c6cf9e7c9477149a25ec2cc67e89494814 WatchSource:0}: Error finding container 428bee91fb2c029282b774ad8d35e6c6cf9e7c9477149a25ec2cc67e89494814: Status 404 returned error can't find the container with id 428bee91fb2c029282b774ad8d35e6c6cf9e7c9477149a25ec2cc67e89494814 Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.910760 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc2f1b1e_37c4_45c1_8f9c_221faf5b777d.slice/crio-3bc0d9e3a528e2be1a279e4ab908175b0c7902a2dad592676a158bc0265d829b WatchSource:0}: Error finding container 3bc0d9e3a528e2be1a279e4ab908175b0c7902a2dad592676a158bc0265d829b: Status 404 returned error can't find the container with id 3bc0d9e3a528e2be1a279e4ab908175b0c7902a2dad592676a158bc0265d829b Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.915673 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a232e3c_1fa4_4163_bb31_bd2f9891f259.slice/crio-45bbb136909166dedf9f7f04f10a48bdc3caff0bc188ce217094b2329ff9ac2f WatchSource:0}: Error finding container 45bbb136909166dedf9f7f04f10a48bdc3caff0bc188ce217094b2329ff9ac2f: Status 404 returned error can't find the container with id 45bbb136909166dedf9f7f04f10a48bdc3caff0bc188ce217094b2329ff9ac2f Feb 27 08:29:54 crc kubenswrapper[4906]: W0227 08:29:54.927574 4906 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b021f0d_0615_479d_ab6c_6736222572f1.slice/crio-5eb5cb8facfb532ddddc642c3f367e4e21808060107923f728fa996573629968 WatchSource:0}: Error finding container 5eb5cb8facfb532ddddc642c3f367e4e21808060107923f728fa996573629968: Status 404 returned error can't find the container with id 5eb5cb8facfb532ddddc642c3f367e4e21808060107923f728fa996573629968 Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.979026 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.979508 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.979527 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.979549 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:54 crc kubenswrapper[4906]: I0227 08:29:54.979565 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:54Z","lastTransitionTime":"2026-02-27T08:29:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.081802 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.081854 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.081865 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.081933 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.081944 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.093192 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"b28a34c86aaca99652316420d2f0eefed915fd8c578fe6fa642712f094cfa58b"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.095747 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c2a4bc7f3ec64a6a0264cc72782dd5abc91ffbebbb12bd4c4f5a57819f108d65"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.097755 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerStarted","Data":"428bee91fb2c029282b774ad8d35e6c6cf9e7c9477149a25ec2cc67e89494814"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.099858 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerStarted","Data":"52848ae2c10cbd2c035368a4e896064f5c895959779f11c5152aa09804f49076"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.102850 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"6085b702e3d34643dc93d2eeea667bf0924c9f3d6c6723e6d96663a601433a38"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.109102 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-24rf6" event={"ID":"4b021f0d-0615-479d-ab6c-6736222572f1","Type":"ContainerStarted","Data":"5eb5cb8facfb532ddddc642c3f367e4e21808060107923f728fa996573629968"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.114582 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"3bc0d9e3a528e2be1a279e4ab908175b0c7902a2dad592676a158bc0265d829b"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.118875 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" event={"ID":"aac6a472-a779-4ece-a897-c062a410c555","Type":"ContainerStarted","Data":"a6f832235a4334470aee68b2e963e4aa9788da1943c90ea5191d33c486e683e4"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.120491 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.120521 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"5f72107f76721706132c879200ae447ff3d514374d272267af6fc87de4400532"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.124721 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-wxkxk" 
event={"ID":"8a232e3c-1fa4-4163-bb31-bd2f9891f259","Type":"ContainerStarted","Data":"45bbb136909166dedf9f7f04f10a48bdc3caff0bc188ce217094b2329ff9ac2f"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.132935 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.144915 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.156055 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.159260 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.159439 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.159476 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.159725 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.159806 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:56.159786095 +0000 UTC m=+94.554187805 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.160196 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.160245 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:56.160228757 +0000 UTC m=+94.554630477 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.160265 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:29:56.160256858 +0000 UTC m=+94.554658608 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.167337 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.187123 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.210397 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.237607 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.237653 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.237666 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.237684 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.237696 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.259794 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3
349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.260349 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.260383 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.260426 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260558 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260572 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260583 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:55 crc kubenswrapper[4906]: 
E0227 08:29:55.260618 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:56.260605109 +0000 UTC m=+94.655006719 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260663 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260687 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:56.260679531 +0000 UTC m=+94.655081141 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260734 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260743 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260751 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:55 crc kubenswrapper[4906]: E0227 08:29:55.260769 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:56.260764084 +0000 UTC m=+94.655165694 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.276154 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubern
etes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.286697 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.297088 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.311287 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.321064 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.331835 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.341000 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.341041 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.341053 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.341069 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.341081 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.344176 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.443481 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.443512 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.443521 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.443535 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.443545 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.545767 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.545803 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.545811 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.545825 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.545834 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.648528 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.648570 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.648580 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.648596 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.648610 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.751142 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.751457 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.751470 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.751489 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.751499 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.853378 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.853418 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.853426 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.853440 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.853449 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.955646 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.955688 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.955698 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.955713 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:55 crc kubenswrapper[4906]: I0227 08:29:55.955726 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:55Z","lastTransitionTime":"2026-02-27T08:29:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.057547 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.057588 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.057596 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.057610 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.057619 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.128895 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.129840 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-24rf6" event={"ID":"4b021f0d-0615-479d-ab6c-6736222572f1","Type":"ContainerStarted","Data":"bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.132421 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerStarted","Data":"ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.134430 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" event={"ID":"aac6a472-a779-4ece-a897-c062a410c555","Type":"ContainerStarted","Data":"6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.134479 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" event={"ID":"aac6a472-a779-4ece-a897-c062a410c555","Type":"ContainerStarted","Data":"a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.136811 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.137571 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88" exitCode=0 Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.137601 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae" exitCode=1 Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.137647 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.137679 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.137706 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.137718 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" 
event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.137730 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.139302 4906 generic.go:334] "Generic (PLEG): container finished" podID="f741b698-d9d4-4e22-800a-91e67ca6e260" containerID="3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6" exitCode=0 Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.139349 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerDied","Data":"3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.141128 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-wxkxk" event={"ID":"8a232e3c-1fa4-4163-bb31-bd2f9891f259","Type":"ContainerStarted","Data":"c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.143373 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.143409 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.146218 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.146275 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.151392 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.159444 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.159555 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.159643 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.159743 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.159818 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.166380 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.169148 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.169424 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:29:58.169399076 +0000 UTC m=+96.563800706 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.170461 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.170514 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.170659 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.170705 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:58.170694653 +0000 UTC m=+96.565096263 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.171484 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.171573 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:58.171550997 +0000 UTC m=+96.565952787 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.176929 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.186037 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.199554 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.213745 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.225129 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.236827 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.247616 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.259312 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.263006 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.263040 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.263053 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.263067 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.263077 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.271250 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.271307 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.271350 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.271431 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.271486 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.271527 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.271542 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.271500 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:58.271482757 +0000 UTC m=+96.665884367 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.271918 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:58.271871538 +0000 UTC m=+96.666273218 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.271991 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.272002 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.272013 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.272046 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:29:58.272036493 +0000 UTC m=+96.666438203 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.274778 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.284724 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.297687 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.307213 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.323590 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.334402 4906 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" 
Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.345201 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.356995 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.365855 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc 
kubenswrapper[4906]: I0227 08:29:56.365926 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.365938 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.365958 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.365974 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.371714 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.383323 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-v
ar-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.392096 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"st
artTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.403714 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.428169 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.446090 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-27T08:29:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.458575 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.468524 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.468559 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.468569 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.468588 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.468600 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.475074 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.492919 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.505254 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.551153 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.551271 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.551320 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.551471 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.551563 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.551617 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.551785 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:29:56 crc kubenswrapper[4906]: E0227 08:29:56.551866 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.557804 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.558800 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.560245 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.561059 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.562321 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.563608 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.564462 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.565734 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.566547 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 27 08:29:56 crc 
kubenswrapper[4906]: I0227 08:29:56.567698 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.568367 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.570190 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.570481 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.570521 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.570533 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.570547 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.570558 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.570867 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.571531 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.572626 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.573277 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.574469 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.575162 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.575857 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.577102 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.577711 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.578978 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.579548 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.580839 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.581425 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.582490 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 27 08:29:56 
crc kubenswrapper[4906]: I0227 08:29:56.583808 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.584423 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.585646 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.586291 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.587430 4906 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.587568 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.589739 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.590485 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.591774 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.593612 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.594485 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.595641 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.596559 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.597829 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 27 08:29:56 crc 
kubenswrapper[4906]: I0227 08:29:56.598497 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.599743 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.600772 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.602369 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.603186 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.604341 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.605059 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.606493 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.607170 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.608254 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.608907 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.609570 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.610764 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.611393 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.672958 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.673027 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.673041 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.673060 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.673072 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.775841 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.775969 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.775993 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.776025 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.776049 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.878872 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.878950 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.878964 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.878986 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.879003 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.981938 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.981992 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.982006 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.982023 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:56 crc kubenswrapper[4906]: I0227 08:29:56.982036 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:56Z","lastTransitionTime":"2026-02-27T08:29:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.084355 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.084418 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.084434 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.084450 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.084461 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.154322 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.155727 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.155791 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.158273 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerStarted","Data":"afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.174050 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"star
tTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.187061 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.187104 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.187114 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.187131 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.187144 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.197150 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z 
is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.215393 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.234615 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.250466 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.292672 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.292718 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.292732 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.292753 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.292767 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.293166 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.311363 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.327694 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.342063 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.361667 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.379726 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.395680 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.396014 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.396042 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.396050 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.396063 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.396072 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.411601 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountP
ath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{
\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.423392 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:57Z is after 2025-08-24T17:21:41Z" Feb 27 
08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.497997 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.498038 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.498047 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.498063 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.498073 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.599980 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.600036 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.600047 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.600067 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.600079 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.702685 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.702720 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.702731 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.702744 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.702753 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.805430 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.805483 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.805496 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.805514 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.805782 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.907990 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.908032 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.908046 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.908062 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:57 crc kubenswrapper[4906]: I0227 08:29:57.908074 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:57Z","lastTransitionTime":"2026-02-27T08:29:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.012422 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.012866 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.012907 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.012930 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.012946 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.115747 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.115780 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.115788 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.115803 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.115811 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.164306 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.168933 4906 generic.go:334] "Generic (PLEG): container finished" podID="f741b698-d9d4-4e22-800a-91e67ca6e260" containerID="afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923" exitCode=0 Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.169004 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerDied","Data":"afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.184537 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.195474 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.195644 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.195726 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.195904 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 
08:29:58.195966 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:02.195949307 +0000 UTC m=+100.590350917 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.196353 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:30:02.196340498 +0000 UTC m=+100.590742108 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.196434 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.196465 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:02.196456422 +0000 UTC m=+100.590858032 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.198944 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.213774 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.219516 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.219597 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.219612 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.219635 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.219651 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.233680 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets
/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entry
point\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.251351 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 
08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.287599 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.297139 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: 
\"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.297257 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.297350 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297444 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297483 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297496 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297518 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297443 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297542 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:02.297524834 +0000 UTC m=+100.691926444 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297579 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297592 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297610 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:02.297581336 +0000 UTC m=+100.691982956 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.297631 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:02.297623487 +0000 UTC m=+100.692025307 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.322628 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.322672 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.322680 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.322699 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.322717 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.349461 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z 
is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.364855 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.377694 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.390408 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.402535 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.419516 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.425026 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.425129 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.425384 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.425433 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.425449 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.436534 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.451410 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.464129 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.486492 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 
08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.505855 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.527842 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.529589 4906 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.529631 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.529644 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.529658 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.529668 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.542603 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.551590 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.551665 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.551747 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.551874 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.552013 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.552070 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.552145 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:29:58 crc kubenswrapper[4906]: E0227 08:29:58.552222 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.558777 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.572610 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.588750 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.615053 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging 
kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-li
b\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\
\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.631064 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},
{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.631919 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.631946 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.631955 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.631968 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.631978 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.648854 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.665993 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.681924 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.702278 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:58Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.733841 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.733903 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.733915 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.733930 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.733941 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.837310 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.837386 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.837404 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.837428 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.837447 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.939563 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.939597 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.939606 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.939620 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:58 crc kubenswrapper[4906]: I0227 08:29:58.939629 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:58Z","lastTransitionTime":"2026-02-27T08:29:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.042067 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.042115 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.042129 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.042148 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.042161 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.144572 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.144608 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.144616 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.144630 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.144640 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.177717 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.178650 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.181011 4906 generic.go:334] "Generic (PLEG): container finished" podID="f741b698-d9d4-4e22-800a-91e67ca6e260" containerID="41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9" exitCode=0 Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.181121 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerDied","Data":"41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.202471 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.224576 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.247495 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.250675 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.250737 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.250754 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.250779 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.250794 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.264270 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.281525 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.299158 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.312615 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.327349 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.344823 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.354752 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.354790 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.354803 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.354821 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.354834 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.357810 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.367517 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.392333 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.408260 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k
8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.421866 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:29:59Z is after 2025-08-24T17:21:41Z" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.458627 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.458685 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.458699 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.458724 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.458740 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.562321 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.562705 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.562719 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.562743 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.562759 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.571145 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:29:59 crc kubenswrapper[4906]: E0227 08:29:59.571816 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.573415 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.667236 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.667303 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.667317 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.667339 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.667353 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.769903 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.769947 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.769958 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.769976 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.769987 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.872484 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.872533 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.872546 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.872565 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.872578 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.975209 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.975259 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.975270 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.975287 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:29:59 crc kubenswrapper[4906]: I0227 08:29:59.975298 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:29:59Z","lastTransitionTime":"2026-02-27T08:29:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.078259 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.078314 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.078327 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.078345 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.078359 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.181903 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.181957 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.181968 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.181986 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.181998 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.189024 4906 generic.go:334] "Generic (PLEG): container finished" podID="f741b698-d9d4-4e22-800a-91e67ca6e260" containerID="64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4" exitCode=0 Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.189126 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerDied","Data":"64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.189657 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:30:00 crc kubenswrapper[4906]: E0227 08:30:00.189872 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.207599 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.225319 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.238978 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshi
ft-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.257784 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2
af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.275143 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e91
1699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.286460 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.286505 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.286518 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.286536 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.286549 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.299400 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3
349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.312826 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/n
et.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.324653 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.337622 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.353709 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.369088 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.385618 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.389909 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.389960 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.389972 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.389993 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.390006 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.401869 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.418494 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.432158 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:00Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.493159 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.493218 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.493230 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.493249 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.493261 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.553243 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.553679 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:00 crc kubenswrapper[4906]: E0227 08:30:00.553814 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.553809 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:00 crc kubenswrapper[4906]: E0227 08:30:00.554038 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:00 crc kubenswrapper[4906]: E0227 08:30:00.553862 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.553679 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:00 crc kubenswrapper[4906]: E0227 08:30:00.554265 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.596344 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.596488 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.596569 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.596663 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.596793 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.699651 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.699684 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.699694 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.699709 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.699719 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.802898 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.802952 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.802966 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.802987 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.803001 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.905701 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.905735 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.905747 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.905764 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:00 crc kubenswrapper[4906]: I0227 08:30:00.905775 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:00Z","lastTransitionTime":"2026-02-27T08:30:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.008464 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.008503 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.008514 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.008528 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.008538 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.111050 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.111094 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.111107 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.111123 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.111134 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.196581 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerStarted","Data":"e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.209856 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.214024 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.214068 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.214083 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.214103 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.214116 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.220643 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.235711 4906 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.253776 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.266944 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.278685 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.288447 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.311258 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.316544 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.316601 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.316615 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.316662 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.316685 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.327537 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.340383 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.352067 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.369119 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.384698 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.399603 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.412123 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:01Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.419774 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.419824 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.419837 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.419862 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.419875 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.522271 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.522322 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.522336 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.522356 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.522371 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.625067 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.625094 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.625127 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.625156 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.625164 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.727379 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.727422 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.727435 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.727451 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.727462 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.830106 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.830310 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.830326 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.830344 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.830361 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.933095 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.933152 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.933170 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.933192 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:01 crc kubenswrapper[4906]: I0227 08:30:01.933207 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:01Z","lastTransitionTime":"2026-02-27T08:30:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.036027 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.036068 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.036078 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.036093 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.036105 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.138404 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.138474 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.138488 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.138507 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.138523 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.203289 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.204111 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.204954 4906 scope.go:117] "RemoveContainer" containerID="78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.246905 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.246937 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.246946 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.246962 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.246972 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.247096 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.247198 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.247264 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:30:10.247241038 +0000 UTC m=+108.641642648 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.247283 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.247378 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:10.247352481 +0000 UTC m=+108.641754141 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.247426 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.247541 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.247598 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:10.247584988 +0000 UTC m=+108.641986618 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.249941 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.272669 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.291615 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kub
e-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.308555 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.322460 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.343242 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"
image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname 
/var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":tru
e,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.348305 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.348357 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.348416 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.348540 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.348560 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.348571 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.348607 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:10.348595177 +0000 UTC m=+108.742996787 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.350446 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.350546 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:10.350517811 +0000 UTC m=+108.744919431 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.350671 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.350690 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.350706 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.350782 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:10.350751128 +0000 UTC m=+108.745152758 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.351196 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.351256 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.351271 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.351307 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.351321 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.359687 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.369779 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.382544 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.399286 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.416757 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.429714 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.441279 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.454396 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.454436 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.454447 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.454463 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.454475 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.457432 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.468726 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.551173 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.551283 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.551173 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.551411 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.551575 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.551573 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.551695 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:02 crc kubenswrapper[4906]: E0227 08:30:02.551776 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.556415 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.556452 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.556465 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.556482 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.556495 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.565975 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.579144 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.600394 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-acl-logging nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/s
erviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\
\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:5
4Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.616254 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-k
ubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.630197 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.643922 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.659658 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.659720 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.659735 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.659759 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.659773 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.662548 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.721599 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.738078 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.750715 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.762677 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.762719 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.762728 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.762741 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.762750 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.764997 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.776505 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.791016 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.811473 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.832287 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.865924 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.865966 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.865974 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.865987 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.865996 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.968479 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.968739 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.968815 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.968985 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:02 crc kubenswrapper[4906]: I0227 08:30:02.969074 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:02Z","lastTransitionTime":"2026-02-27T08:30:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.074222 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.074601 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.074691 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.074771 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.074940 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.177961 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.178202 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.178294 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.178386 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.178506 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.214208 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.215696 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.215967 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.216138 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.216297 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.222797 4906 generic.go:334] "Generic (PLEG): container finished" podID="f741b698-d9d4-4e22-800a-91e67ca6e260" containerID="e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5" exitCode=0 Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.222841 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerDied","Data":"e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.234454 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.248044 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kub
e-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.255276 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.260095 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.261620 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.280385 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.284063 4906 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.284097 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.284112 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.284127 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.284139 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.298620 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.311085 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.320904 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.1
68.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.334350 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.355302 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.370555 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.384365 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.386923 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.386966 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.386977 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.386999 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.387010 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.399650 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.412386 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.427349 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.439653 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.452663 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.465114 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.477457 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.489154 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.489191 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.489201 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.489218 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.489230 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.501187 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ 
start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"h
ost-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":
\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.519492 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.531998 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.546207 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.562273 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.581106 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.591902 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.592159 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.592323 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.592435 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.592508 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.596344 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.611985 4906 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o
://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath
\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.626760 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.640717 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.652651 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.664943 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:03Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.695160 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.695201 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.695213 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.695231 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.695243 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.797837 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.798142 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.798221 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.798312 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.798387 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.900560 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.900870 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.900983 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.901903 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:03 crc kubenswrapper[4906]: I0227 08:30:03.901947 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:03Z","lastTransitionTime":"2026-02-27T08:30:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.005411 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.005461 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.005472 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.005490 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.005505 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.107786 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.108169 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.108313 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.108462 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.108560 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.211307 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.211616 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.211712 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.211804 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.211923 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.229709 4906 generic.go:334] "Generic (PLEG): container finished" podID="f741b698-d9d4-4e22-800a-91e67ca6e260" containerID="9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1" exitCode=0 Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.229804 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerDied","Data":"9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.250133 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.271401 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.290765 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.312119 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.314741 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.314993 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.315109 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.315220 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.315305 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.329738 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.351337 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\
\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\
\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.365532 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.381252 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.394076 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.409469 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.418558 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.418795 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.418869 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.418959 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.419026 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.426013 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.438470 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.450614 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.469255 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.481076 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with 
unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.521443 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.521950 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.522047 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.522134 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.522204 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.551404 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.551433 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.551434 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.551479 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.551575 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.551937 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.552014 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.552075 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.625493 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.625523 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.625531 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.625544 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.625554 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.730219 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.730750 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.730775 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.730808 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.730832 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.757516 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.757725 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.757815 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.757933 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.758013 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.779945 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.786787 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.787085 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.787299 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.787502 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.787715 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.806360 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.816681 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.816719 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.816730 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.816745 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.816758 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.828182 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.832115 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.832357 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.832441 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.832533 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.832603 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.847202 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.851356 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.851388 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.851399 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.851416 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.851430 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.864571 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:04Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:04 crc kubenswrapper[4906]: E0227 08:30:04.864678 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.866150 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.866165 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.866174 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.866186 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.866195 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.968619 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.968659 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.968667 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.968681 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:04 crc kubenswrapper[4906]: I0227 08:30:04.968690 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:04Z","lastTransitionTime":"2026-02-27T08:30:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.072129 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.072190 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.072205 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.072223 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.072235 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.175539 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.175589 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.175601 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.175618 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.175630 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.238985 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/0.log" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.243764 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.245356 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d" exitCode=1 Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.245922 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.247426 4906 scope.go:117] "RemoveContainer" containerID="22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.256362 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" event={"ID":"f741b698-d9d4-4e22-800a-91e67ca6e260","Type":"ContainerStarted","Data":"1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.276118 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:04.996775 6688 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:04.996815 6688 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0227 08:30:04.996833 6688 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:04.996856 6688 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:04.996861 6688 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:04.996939 6688 handler.go:190] Sending *v1.Namespace event handler 1 for 
removal\\\\nI0227 08:30:04.996950 6688 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:04.996966 6688 factory.go:656] Stopping watch factory\\\\nI0227 08:30:04.996983 6688 ovnkube.go:599] Stopped ovnkube\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:04.997034 6688 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0227 08:30:04.997042 6688 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:04.997047 6688 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:04.997054 6688 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/et
c/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.278594 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.278685 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.278703 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.278723 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.278740 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.301127 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.314025 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.328050 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.347095 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.361956 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.377508 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.383361 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.383603 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.383840 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.384062 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.384272 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.393903 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.408635 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.425985 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.441512 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.461924 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.480429 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.487395 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.487429 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.487439 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.487456 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.487471 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.492896 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"ru
nning\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.508338 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,
\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.523642 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"w
ebhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.534566 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\
"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.548807 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.563546 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.575943 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\"
:true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.589093 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: 
failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.590372 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.590449 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.590465 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.590489 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.590527 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.610558 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:04.996775 6688 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:04.996815 6688 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0227 08:30:04.996833 6688 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:04.996856 6688 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:04.996861 6688 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:04.996939 6688 handler.go:190] Sending *v1.Namespace event handler 1 for 
removal\\\\nI0227 08:30:04.996950 6688 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:04.996966 6688 factory.go:656] Stopping watch factory\\\\nI0227 08:30:04.996983 6688 ovnkube.go:599] Stopped ovnkube\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:04.997034 6688 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0227 08:30:04.997042 6688 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:04.997047 6688 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:04.997054 6688 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/et
c/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.628534 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.642756 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.659373 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.675616 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.691264 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.696169 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.696239 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.696260 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.696284 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.696307 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.709177 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.723031 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.733168 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:05Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.800375 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.800433 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.800446 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.800467 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.800482 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.903756 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.903807 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.903817 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.903838 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:05 crc kubenswrapper[4906]: I0227 08:30:05.903852 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:05Z","lastTransitionTime":"2026-02-27T08:30:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.006649 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.006695 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.006709 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.006732 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.006748 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.110833 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.110907 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.110922 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.110946 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.110959 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.214857 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.214920 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.214932 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.214952 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.214965 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.264230 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/0.log" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.266438 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.267325 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.267901 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.287641 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.300644 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.315140 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.317384 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.317441 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.317453 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.317477 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.317494 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.334816 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.349293 4906 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.364073 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.379732 4906 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.398924 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532
914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( 
retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:04.996775 6688 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:04.996815 6688 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0227 08:30:04.996833 6688 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:04.996856 6688 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:04.996861 6688 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:04.996939 6688 handler.go:190] Sending *v1.Namespace event handler 
1 for removal\\\\nI0227 08:30:04.996950 6688 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:04.996966 6688 factory.go:656] Stopping watch factory\\\\nI0227 08:30:04.996983 6688 ovnkube.go:599] Stopped ovnkube\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:04.997034 6688 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0227 08:30:04.997042 6688 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:04.997047 6688 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:04.997054 6688 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"
running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.413466 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.419404 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.419439 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.419450 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.419465 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.419477 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.427901 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.445059 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.461473 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.475061 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.490803 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.508866 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.522185 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.522231 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.522242 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.522264 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.522282 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.551192 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.551325 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:06 crc kubenswrapper[4906]: E0227 08:30:06.551427 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.551451 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:06 crc kubenswrapper[4906]: E0227 08:30:06.551662 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.551829 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:06 crc kubenswrapper[4906]: E0227 08:30:06.551824 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:06 crc kubenswrapper[4906]: E0227 08:30:06.552199 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.625020 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.625077 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.625088 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.625101 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.625111 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.727616 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.727677 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.727694 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.727723 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.727738 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.830928 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.831008 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.831028 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.831059 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.831081 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.933952 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.933999 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.934015 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.934036 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:06 crc kubenswrapper[4906]: I0227 08:30:06.934052 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:06Z","lastTransitionTime":"2026-02-27T08:30:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.036903 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.036946 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.036962 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.037013 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.037030 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.140320 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.140393 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.140407 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.140433 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.140449 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.243769 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.243841 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.243856 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.243925 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.243949 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.347089 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.347998 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.348215 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.348363 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.348664 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.458354 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.458428 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.458441 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.458460 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.458473 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.561671 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.561723 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.561737 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.561760 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.561774 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.664789 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.664835 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.664851 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.664871 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.664904 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.768127 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.768241 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.768262 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.768288 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.768306 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.871484 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.871551 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.871568 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.871590 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.871607 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.975362 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.975438 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.975450 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.975471 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:07 crc kubenswrapper[4906]: I0227 08:30:07.975484 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:07Z","lastTransitionTime":"2026-02-27T08:30:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.082139 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.082203 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.082217 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.082242 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.082257 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.185852 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.185943 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.185958 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.185977 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.185990 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.278068 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/1.log" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.279321 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/0.log" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.281940 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.283114 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c" exitCode=1 Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.283168 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.283270 4906 scope.go:117] "RemoveContainer" containerID="22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.284245 4906 scope.go:117] "RemoveContainer" containerID="39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c" Feb 27 08:30:08 crc kubenswrapper[4906]: E0227 08:30:08.284618 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.288628 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.289022 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.289034 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.289053 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.289073 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.302373 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.321392 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.337024 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.356048 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.374276 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready 
status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference 
(falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.389442 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.391295 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.391401 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.391466 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.391533 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.391593 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.407341 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.423481 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.437957 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.450724 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.466605 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.483512 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.495005 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.495053 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.495066 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.495082 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.495094 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.497506 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.528681 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:04.996775 6688 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:04.996815 6688 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0227 08:30:04.996833 6688 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:04.996856 6688 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:04.996861 6688 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:04.996939 6688 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:04.996950 6688 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 
08:30:04.996966 6688 factory.go:656] Stopping watch factory\\\\nI0227 08:30:04.996983 6688 ovnkube.go:599] Stopped ovnkube\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:04.997034 6688 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0227 08:30:04.997042 6688 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:04.997047 6688 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:04.997054 6688 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:07Z\\\",\\\"message\\\":\\\"nalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0227 08:30:06.510595 6888 services_controller.go:452] Built service openshift-etcd-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0227 08:30:06.510601 6888 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: 
fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.545485 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:08Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.551846 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:08 crc kubenswrapper[4906]: E0227 08:30:08.552041 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.552324 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:08 crc kubenswrapper[4906]: E0227 08:30:08.552549 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.552661 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:08 crc kubenswrapper[4906]: E0227 08:30:08.552819 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.553199 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:08 crc kubenswrapper[4906]: E0227 08:30:08.553445 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.598390 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.598467 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.598490 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.598523 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.598548 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.701521 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.701581 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.701590 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.701602 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.701612 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.804447 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.804488 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.804499 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.804516 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.804527 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.907446 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.907792 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.908071 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.908298 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:08 crc kubenswrapper[4906]: I0227 08:30:08.908385 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:08Z","lastTransitionTime":"2026-02-27T08:30:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.012892 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.012944 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.012959 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.012981 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.012995 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.116812 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.116901 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.116949 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.116974 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.116996 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.219232 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.219276 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.219289 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.219304 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.219313 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.288861 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/1.log" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.292619 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.321942 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.322115 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.322148 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.322176 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.322194 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.425634 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.426837 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.427105 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.427124 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.427135 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.530934 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.530983 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.530996 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.531013 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.531025 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.634629 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.634916 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.635012 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.635104 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.635179 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.738421 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.738525 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.738539 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.738858 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.738903 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.842467 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.842516 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.842529 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.842552 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.842568 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.946134 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.946187 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.946199 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.946216 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:09 crc kubenswrapper[4906]: I0227 08:30:09.946228 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:09Z","lastTransitionTime":"2026-02-27T08:30:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.049249 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.049281 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.049292 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.049307 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.049319 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.151163 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.151196 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.151205 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.151219 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.151228 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.253471 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.253850 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.253947 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.254018 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.254088 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.295345 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.295709 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:30:26.295655686 +0000 UTC m=+124.690057336 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.295875 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.296001 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.296267 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.296343 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:26.296325875 +0000 UTC m=+124.690727515 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.296384 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.296461 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:26.296440368 +0000 UTC m=+124.690842188 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.357268 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.357524 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.357610 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.357680 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.357738 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.396943 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397154 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397182 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397196 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397259 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:26.397240793 +0000 UTC m=+124.791642403 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.397441 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.397560 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397515 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397715 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:26.397694506 +0000 UTC m=+124.792096116 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397867 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.397984 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.398065 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.398165 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:26.398154029 +0000 UTC m=+124.792555639 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.460377 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.460989 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.461087 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.461176 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.461320 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.552041 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.552201 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.552480 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.552055 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.552743 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.552856 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.553033 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:10 crc kubenswrapper[4906]: E0227 08:30:10.553040 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.565429 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.565743 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.566090 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.566354 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.566534 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.669772 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.669822 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.669834 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.669854 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.669866 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.772199 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.772244 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.772255 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.772269 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.772280 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.876052 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.876121 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.876144 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.876176 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.876200 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.978948 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.978997 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.979007 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.979020 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:10 crc kubenswrapper[4906]: I0227 08:30:10.979031 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:10Z","lastTransitionTime":"2026-02-27T08:30:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.082219 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.082718 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.082858 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.083001 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.083126 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.185960 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.186006 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.186018 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.186043 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.186054 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.288552 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.288625 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.288637 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.288658 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.288671 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.391375 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.391446 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.391466 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.391496 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.391517 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.494836 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.495479 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.495788 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.496122 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.496371 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.599410 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.599490 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.599503 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.599525 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.599537 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.703076 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.703442 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.703565 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.703657 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.703730 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.807766 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.807833 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.807861 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.807926 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.807952 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.911365 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.911436 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.911454 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.911488 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:11 crc kubenswrapper[4906]: I0227 08:30:11.911511 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:11Z","lastTransitionTime":"2026-02-27T08:30:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.014603 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.015108 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.015267 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.015447 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.015627 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.119246 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.119582 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.119652 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.119765 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.119898 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.222486 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.222855 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.222960 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.223027 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.223086 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.326342 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.326418 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.326439 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.326467 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.326485 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.429746 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.429822 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.429842 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.429870 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.429925 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.533078 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.533139 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.533151 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.533175 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.533192 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.552313 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:12 crc kubenswrapper[4906]: E0227 08:30:12.552552 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.552591 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.552668 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.552719 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:12 crc kubenswrapper[4906]: E0227 08:30:12.552864 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:12 crc kubenswrapper[4906]: E0227 08:30:12.553046 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:12 crc kubenswrapper[4906]: E0227 08:30:12.553468 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.553746 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.572504 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff
58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.585179 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.597042 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.608322 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.622536 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.637091 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.637188 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.637207 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.637238 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.637266 4906 
setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.637261 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\
":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.651844 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc 
kubenswrapper[4906]: I0227 08:30:12.667579 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.689957 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:04.996775 6688 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:04.996815 6688 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0227 08:30:04.996833 6688 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:04.996856 6688 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:04.996861 6688 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:04.996939 6688 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:04.996950 6688 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 
08:30:04.996966 6688 factory.go:656] Stopping watch factory\\\\nI0227 08:30:04.996983 6688 ovnkube.go:599] Stopped ovnkube\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:04.997034 6688 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0227 08:30:04.997042 6688 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:04.997047 6688 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:04.997054 6688 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:07Z\\\",\\\"message\\\":\\\"nalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0227 08:30:06.510595 6888 services_controller.go:452] Built service openshift-etcd-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0227 08:30:06.510601 6888 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: 
fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.706831 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.722361 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.739235 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.740568 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.740621 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.740633 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.740647 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.740659 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.754906 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.772942 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.787638 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.842976 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.843008 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.843021 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.843039 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.843052 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.948941 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.949436 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.949460 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.949479 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:12 crc kubenswrapper[4906]: I0227 08:30:12.949492 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:12Z","lastTransitionTime":"2026-02-27T08:30:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.051546 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.051580 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.051588 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.051603 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.051613 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.154690 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.154736 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.154746 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.154763 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.154776 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.257823 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.257862 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.257873 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.257940 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.257953 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.316002 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.317842 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.318820 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.334229 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.349203 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.361097 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.361140 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.361150 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.361197 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.361217 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.365680 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.379041 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.398041 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.420511 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.436610 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.455127 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.464206 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.464278 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc 
kubenswrapper[4906]: I0227 08:30:13.464293 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.464321 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.464337 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.475489 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:2
9:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.497765 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"
webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.519669 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.545832 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://22b95afde07822153b75cf5604afa0207fc62112222cd067bd14fcd9057c790d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:04.996775 6688 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:04.996815 6688 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0227 08:30:04.996833 6688 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:04.996856 6688 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:04.996861 6688 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:04.996939 6688 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:04.996950 6688 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 
08:30:04.996966 6688 factory.go:656] Stopping watch factory\\\\nI0227 08:30:04.996983 6688 ovnkube.go:599] Stopped ovnkube\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:04.997023 6688 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:04.997034 6688 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0227 08:30:04.997042 6688 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:04.997047 6688 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:04.997054 6688 handler.go:208] Removed *v1.Node event handler 2\\\\nI02\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:01Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:07Z\\\",\\\"message\\\":\\\"nalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0227 08:30:06.510595 6888 services_controller.go:452] Built service openshift-etcd-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0227 08:30:06.510601 6888 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: 
fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.565493 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.567200 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.567256 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.567267 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.567286 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.567296 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.582858 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.596389 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:13Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.669635 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.669683 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.669702 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.669724 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.669737 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.772443 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.772486 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.772498 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.772516 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.772528 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.875463 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.875519 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.875530 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.875550 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.875562 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.978894 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.978948 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.978957 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.978974 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:13 crc kubenswrapper[4906]: I0227 08:30:13.978986 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:13Z","lastTransitionTime":"2026-02-27T08:30:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.083539 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.083601 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.083609 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.083628 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.083642 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.186845 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.186933 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.186949 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.186976 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.186988 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.290038 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.290098 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.290110 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.290132 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.290144 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.393383 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.393440 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.393453 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.393473 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.393485 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.495874 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.495937 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.495948 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.495967 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.495981 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.551166 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.551210 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.551336 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:14 crc kubenswrapper[4906]: E0227 08:30:14.551478 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.551923 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:14 crc kubenswrapper[4906]: E0227 08:30:14.552001 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:14 crc kubenswrapper[4906]: E0227 08:30:14.552068 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:14 crc kubenswrapper[4906]: E0227 08:30:14.552128 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.598869 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.599161 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.599300 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.599401 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.599487 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.702673 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.703133 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.703237 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.703331 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.703416 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.806097 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.806144 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.806155 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.806172 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.806184 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.909537 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.909600 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.909618 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.909647 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:14 crc kubenswrapper[4906]: I0227 08:30:14.909664 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:14Z","lastTransitionTime":"2026-02-27T08:30:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.012552 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.012607 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.012619 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.012638 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.012651 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.115975 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.116030 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.116050 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.116071 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.116082 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.218806 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.218925 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.218951 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.218983 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.219006 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.230684 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.230801 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.230825 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.230855 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.230919 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: E0227 08:30:15.253254 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:15Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.259097 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.259152 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.259165 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.259186 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.259203 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: E0227 08:30:15.276705 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:15Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.281776 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.281839 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.281851 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.281874 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.281923 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: E0227 08:30:15.298048 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:15Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.303180 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.303332 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.303426 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.303517 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.303657 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: E0227 08:30:15.317645 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:15Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.323327 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.323367 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.323377 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.323392 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.323406 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: E0227 08:30:15.340791 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:15Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:15Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:15 crc kubenswrapper[4906]: E0227 08:30:15.340924 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.343166 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.343207 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.343220 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.343243 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.343256 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.446048 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.446120 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.446139 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.446163 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.446180 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.549721 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.549791 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.549806 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.549830 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.549843 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.653393 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.653486 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.653507 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.653537 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.653557 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.756927 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.756986 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.756997 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.757015 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.757032 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.859863 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.859934 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.859949 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.859968 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.859979 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.964596 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.965220 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.965313 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.965390 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:15 crc kubenswrapper[4906]: I0227 08:30:15.965469 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:15Z","lastTransitionTime":"2026-02-27T08:30:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.069811 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.069927 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.069947 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.069975 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.069995 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.172964 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.173039 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.173054 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.173078 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.173094 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.275860 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.275927 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.275941 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.275960 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.275973 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.379603 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.379653 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.379664 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.379706 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.379717 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.482571 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.482958 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.483060 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.483149 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.483324 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.551991 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:16 crc kubenswrapper[4906]: E0227 08:30:16.552383 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.552973 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.553165 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.553442 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:16 crc kubenswrapper[4906]: E0227 08:30:16.553397 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:16 crc kubenswrapper[4906]: E0227 08:30:16.553548 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:16 crc kubenswrapper[4906]: E0227 08:30:16.553966 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.567820 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.585815 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.585918 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.585936 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.585960 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.585980 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.689215 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.689271 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.689285 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.689307 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.689321 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.793483 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.793534 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.793550 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.793568 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.793580 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.897607 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.897662 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.897679 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.897699 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:16 crc kubenswrapper[4906]: I0227 08:30:16.897717 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:16Z","lastTransitionTime":"2026-02-27T08:30:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.000385 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.000455 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.000466 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.000486 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.000503 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.103649 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.103726 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.103744 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.103764 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.103776 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.207612 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.207696 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.207713 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.207739 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.207757 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.310778 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.310821 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.310834 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.310848 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.310858 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.413427 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.413512 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.413528 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.413549 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.413562 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.518218 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.518276 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.518293 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.518316 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.518335 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.621471 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.621553 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.621573 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.621598 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.621619 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.725456 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.725530 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.725545 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.725567 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.725582 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.829297 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.829747 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.829857 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.830009 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.830149 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.934641 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.934714 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.934728 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.934752 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:17 crc kubenswrapper[4906]: I0227 08:30:17.934767 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:17Z","lastTransitionTime":"2026-02-27T08:30:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.038514 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.038768 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.038921 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.039022 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.039108 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.142118 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.142168 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.142183 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.142202 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.142214 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.245618 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.245709 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.245729 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.245811 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.245848 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.349064 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.349125 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.349141 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.349167 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.349184 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.452505 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.452569 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.452583 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.452604 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.452622 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.552326 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:18 crc kubenswrapper[4906]: E0227 08:30:18.552513 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.553036 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.553168 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:18 crc kubenswrapper[4906]: E0227 08:30:18.553248 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.553249 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:18 crc kubenswrapper[4906]: E0227 08:30:18.553479 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:18 crc kubenswrapper[4906]: E0227 08:30:18.553636 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.555666 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.555728 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.555742 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.555763 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.555777 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.659449 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.659505 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.659518 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.659541 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.659552 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.763051 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.763940 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.764230 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.764455 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.764573 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.866860 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.867215 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.867336 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.867466 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.867573 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.971039 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.971075 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.971086 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.971100 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:18 crc kubenswrapper[4906]: I0227 08:30:18.971113 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:18Z","lastTransitionTime":"2026-02-27T08:30:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.073825 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.073873 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.073901 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.073916 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.073926 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.176110 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.176512 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.176581 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.176669 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.176736 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.278767 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.278812 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.278823 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.278840 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.278854 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.381422 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.381482 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.381498 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.381520 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.381537 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.483960 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.484039 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.484061 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.484089 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.484110 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.552387 4906 scope.go:117] "RemoveContainer" containerID="39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.572738 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.588022 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.588061 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.588073 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.588089 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.588102 4906 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.591469 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",
\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"co
ntainerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.607259 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.627706 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.642175 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.662867 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics
-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ 
sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:07Z\\\",\\\"message\\\":\\\"nalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, 
AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0227 08:30:06.510595 6888 services_controller.go:452] Built service openshift-etcd-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0227 08:30:06.510601 6888 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.680539 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.691743 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.691786 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.691798 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.691815 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.692116 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.693209 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.16
8.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.706172 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.723336 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.741153 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.756976 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.774060 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.789017 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.794654 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.794699 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.794713 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.794734 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.794747 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.802497 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.820834 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:19Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.902310 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.902362 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.902374 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.902388 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:19 crc kubenswrapper[4906]: I0227 08:30:19.902397 4906 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:19Z","lastTransitionTime":"2026-02-27T08:30:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.005487 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.005547 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.005558 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.005581 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.005594 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.108377 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.108432 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.108441 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.108456 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.108466 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.211509 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.211564 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.211581 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.211601 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.211617 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.314912 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.314946 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.314955 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.314970 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.314981 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.348737 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/1.log" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.350808 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.352256 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.352922 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.372487 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.384477 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.395621 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.412661 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.416825 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.417008 4906 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.417095 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.417178 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.417270 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.425748 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.442204 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.462795 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.478624 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 
27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.494895 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.512190 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.519497 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.519552 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.519564 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.519581 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.519615 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.538590 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ 
local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:07Z\\\",\\\"message\\\":\\\"nalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, 
Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0227 08:30:06.510595 6888 services_controller.go:452] Built service openshift-etcd-operator/metrics per-node LB for network=default: []services.LB{}\\\\nF0227 08:30:06.510601 6888 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209948
2919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.551393 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.551553 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.551410 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:20 crc kubenswrapper[4906]: E0227 08:30:20.551782 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:20 crc kubenswrapper[4906]: E0227 08:30:20.551706 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.551722 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:20 crc kubenswrapper[4906]: E0227 08:30:20.552104 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:20 crc kubenswrapper[4906]: E0227 08:30:20.552260 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.555924 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.570307 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.586165 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.603691 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.617650 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:20Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.622671 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.622838 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.622980 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.623101 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.623203 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.726417 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.726495 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.726512 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.726533 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.726571 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.829587 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.829628 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.829636 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.829650 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.829659 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.932414 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.932459 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.932471 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.932485 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:20 crc kubenswrapper[4906]: I0227 08:30:20.932495 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:20Z","lastTransitionTime":"2026-02-27T08:30:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.035625 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.035788 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.035813 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.035838 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.035859 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.139038 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.139125 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.139149 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.139181 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.139210 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.242400 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.242443 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.242455 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.242474 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.242489 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.347306 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.347597 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.347736 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.347902 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.348018 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.359135 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/2.log" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.359853 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/1.log" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.362582 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.363755 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af" exitCode=1 Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.364145 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.364253 4906 scope.go:117] "RemoveContainer" containerID="39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.365225 4906 scope.go:117] "RemoveContainer" containerID="0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af" Feb 27 08:30:21 crc kubenswrapper[4906]: E0227 08:30:21.365458 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.387300 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39cd97ee86ff77b090c7fc16dfeffd5b4fe4c707dee512418e2c6d49e84c9d3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:07Z\\\",\\\"message\\\":\\\"nalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-etcd-operator/metrics\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.188\\\\\\\", Port:443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0227 08:30:06.510595 6888 services_controller.go:452] Built service openshift-etcd-operator/metrics per-node LB for network=default: 
[]services.LB{}\\\\nF0227 08:30:06.510601 6888 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: fai\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.404535 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni
/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.417080 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.428797 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.444382 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.451056 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.451097 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.451106 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.451123 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.451133 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.459122 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.471167 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.482995 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.497604 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.510774 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.533162 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.549345 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.554055 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.554106 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.554122 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.554178 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.554194 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.567903 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.581587 4906 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.626932 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.638556 4906 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:21Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.656827 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.656902 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.656917 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.656938 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.656951 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.759732 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.760042 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.760178 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.760302 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.760417 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.865284 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.865930 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.865962 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.865993 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.866011 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.969100 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.969172 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.969184 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.969198 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:21 crc kubenswrapper[4906]: I0227 08:30:21.969209 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:21Z","lastTransitionTime":"2026-02-27T08:30:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.072857 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.072948 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.072963 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.072985 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.073000 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:22Z","lastTransitionTime":"2026-02-27T08:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.176176 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.176233 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.176245 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.176263 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.176276 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:22Z","lastTransitionTime":"2026-02-27T08:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.281060 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.281502 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.281615 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.281693 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.281814 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:22Z","lastTransitionTime":"2026-02-27T08:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.371141 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/2.log" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.374835 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.376814 4906 scope.go:117] "RemoveContainer" containerID="0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af" Feb 27 08:30:22 crc kubenswrapper[4906]: E0227 08:30:22.377053 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.384836 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.384950 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.384972 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.385000 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.385018 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:22Z","lastTransitionTime":"2026-02-27T08:30:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.397299 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.415637 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.432596 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.451405 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.465979 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.481954 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: E0227 08:30:22.485538 4906 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.498963 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.509603 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.525074 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.539529 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.551172 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.551194 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.551246 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.551301 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:22 crc kubenswrapper[4906]: E0227 08:30:22.551296 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:22 crc kubenswrapper[4906]: E0227 08:30:22.551429 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:22 crc kubenswrapper[4906]: E0227 08:30:22.551514 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:22 crc kubenswrapper[4906]: E0227 08:30:22.551577 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.559417 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.572732 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.590238 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.601426 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.613502 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.627292 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.640871 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.651675 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.664968 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.676744 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.687914 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.699423 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.712209 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.722372 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.732669 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.749431 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.765300 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.777867 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.788683 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.798737 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.811786 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: I0227 08:30:22.822014 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:22 crc kubenswrapper[4906]: E0227 08:30:22.841026 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:30:24 crc kubenswrapper[4906]: I0227 08:30:24.551446 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:24 crc kubenswrapper[4906]: I0227 08:30:24.551448 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:24 crc kubenswrapper[4906]: I0227 08:30:24.551572 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:24 crc kubenswrapper[4906]: E0227 08:30:24.551736 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:24 crc kubenswrapper[4906]: I0227 08:30:24.551805 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:24 crc kubenswrapper[4906]: E0227 08:30:24.551868 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:24 crc kubenswrapper[4906]: E0227 08:30:24.552003 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:24 crc kubenswrapper[4906]: E0227 08:30:24.552050 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.609559 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.609631 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.609655 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.609684 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.609705 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:25Z","lastTransitionTime":"2026-02-27T08:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:25 crc kubenswrapper[4906]: E0227 08:30:25.631977 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:25Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.636972 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.637019 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.637033 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.637051 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.637080 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:25Z","lastTransitionTime":"2026-02-27T08:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:25 crc kubenswrapper[4906]: E0227 08:30:25.653986 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:25Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.659446 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.659508 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.659523 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.659544 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.659560 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:25Z","lastTransitionTime":"2026-02-27T08:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:25 crc kubenswrapper[4906]: E0227 08:30:25.677865 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:25Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.682637 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.682694 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.682713 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.682738 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.682756 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:25Z","lastTransitionTime":"2026-02-27T08:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:25 crc kubenswrapper[4906]: E0227 08:30:25.702259 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:25Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.707038 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.707088 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.707104 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.707128 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:25 crc kubenswrapper[4906]: I0227 08:30:25.707145 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:25Z","lastTransitionTime":"2026-02-27T08:30:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:25 crc kubenswrapper[4906]: E0227 08:30:25.726982 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:25Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:25 crc kubenswrapper[4906]: E0227 08:30:25.727240 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.389196 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.389346 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:30:58.389321957 +0000 UTC m=+156.783723567 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.389396 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.389474 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.389629 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.389663 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:58.389655116 +0000 UTC m=+156.784056796 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.389678 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.389998 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:58.389969415 +0000 UTC m=+156.784371055 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.490849 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.490958 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.491013 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491168 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491190 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491204 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491220 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491309 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491336 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491308 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr 
podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:58.491288134 +0000 UTC m=+156.885689754 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491459 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:58.491430928 +0000 UTC m=+156.885832578 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491611 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.491726 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:30:58.491704686 +0000 UTC m=+156.886106336 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.551557 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.551599 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.551619 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:26 crc kubenswrapper[4906]: I0227 08:30:26.551660 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.551759 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.551944 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.552076 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:26 crc kubenswrapper[4906]: E0227 08:30:26.552197 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:27 crc kubenswrapper[4906]: E0227 08:30:27.842478 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.551807 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.551839 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.551943 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:28 crc kubenswrapper[4906]: E0227 08:30:28.552123 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.552188 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:28 crc kubenswrapper[4906]: E0227 08:30:28.552573 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:28 crc kubenswrapper[4906]: E0227 08:30:28.552666 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:28 crc kubenswrapper[4906]: E0227 08:30:28.552719 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.682256 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.701092 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792
096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"
mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a4
43f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.714744 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha25
6:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.730873 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastSt
ate\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.746115 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1772
25c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.759483 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.772917 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.783913 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.1
68.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.796049 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.807261 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.827687 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.839968 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.850641 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.862413 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.873637 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.885411 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:28 crc kubenswrapper[4906]: I0227 08:30:28.897568 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:28Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:30 crc kubenswrapper[4906]: I0227 08:30:30.551467 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:30 crc kubenswrapper[4906]: I0227 08:30:30.551579 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:30 crc kubenswrapper[4906]: I0227 08:30:30.551514 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:30 crc kubenswrapper[4906]: I0227 08:30:30.551511 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:30 crc kubenswrapper[4906]: E0227 08:30:30.551956 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:30 crc kubenswrapper[4906]: E0227 08:30:30.552025 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:30 crc kubenswrapper[4906]: E0227 08:30:30.552070 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:30 crc kubenswrapper[4906]: E0227 08:30:30.552296 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.552105 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.552130 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.552218 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.552228 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:32 crc kubenswrapper[4906]: E0227 08:30:32.552628 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:32 crc kubenswrapper[4906]: E0227 08:30:32.552709 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:32 crc kubenswrapper[4906]: E0227 08:30:32.552809 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:32 crc kubenswrapper[4906]: E0227 08:30:32.553015 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.578070 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"contain
erID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc 
kubenswrapper[4906]: I0227 08:30:32.595098 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.614039 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.633998 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.653996 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.674107 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.689437 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.701036 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.711999 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.735725 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.750322 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.760735 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.775871 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.785651 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.797530 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: I0227 08:30:32.808670 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:32Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:32 crc kubenswrapper[4906]: E0227 08:30:32.843187 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:30:33 crc kubenswrapper[4906]: I0227 08:30:33.553023 4906 scope.go:117] "RemoveContainer" containerID="0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af" Feb 27 08:30:33 crc kubenswrapper[4906]: E0227 08:30:33.553393 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:30:34 crc kubenswrapper[4906]: I0227 08:30:34.551125 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:34 crc kubenswrapper[4906]: I0227 08:30:34.551134 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:34 crc kubenswrapper[4906]: I0227 08:30:34.551177 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:34 crc kubenswrapper[4906]: I0227 08:30:34.551371 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:34 crc kubenswrapper[4906]: E0227 08:30:34.551376 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:34 crc kubenswrapper[4906]: E0227 08:30:34.551513 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:34 crc kubenswrapper[4906]: E0227 08:30:34.551651 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:34 crc kubenswrapper[4906]: E0227 08:30:34.551778 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.911086 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.911185 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.911202 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.911230 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.911248 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:35Z","lastTransitionTime":"2026-02-27T08:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:30:35 crc kubenswrapper[4906]: E0227 08:30:35.928314 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:35Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.938446 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.938477 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.938488 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.938508 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.938528 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:35Z","lastTransitionTime":"2026-02-27T08:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:35 crc kubenswrapper[4906]: E0227 08:30:35.951910 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:35Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.956450 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.956570 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.956636 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.956652 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.956691 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:35Z","lastTransitionTime":"2026-02-27T08:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:35 crc kubenswrapper[4906]: E0227 08:30:35.975695 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:35Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.979547 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.979594 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.979610 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.979634 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:35 crc kubenswrapper[4906]: I0227 08:30:35.979651 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:35Z","lastTransitionTime":"2026-02-27T08:30:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:35 crc kubenswrapper[4906]: E0227 08:30:35.997214 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:35Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.001838 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.001904 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.001918 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.001935 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.001947 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:36Z","lastTransitionTime":"2026-02-27T08:30:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:36 crc kubenswrapper[4906]: E0227 08:30:36.018119 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:36Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:36Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:36 crc kubenswrapper[4906]: E0227 08:30:36.018432 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.551716 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.551730 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.551822 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:36 crc kubenswrapper[4906]: I0227 08:30:36.552230 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:36 crc kubenswrapper[4906]: E0227 08:30:36.552516 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:36 crc kubenswrapper[4906]: E0227 08:30:36.552673 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:36 crc kubenswrapper[4906]: E0227 08:30:36.552785 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:36 crc kubenswrapper[4906]: E0227 08:30:36.553051 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:37 crc kubenswrapper[4906]: E0227 08:30:37.844361 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:30:38 crc kubenswrapper[4906]: I0227 08:30:38.551219 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:38 crc kubenswrapper[4906]: I0227 08:30:38.551248 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:38 crc kubenswrapper[4906]: E0227 08:30:38.551341 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:38 crc kubenswrapper[4906]: I0227 08:30:38.551486 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:38 crc kubenswrapper[4906]: E0227 08:30:38.551542 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:38 crc kubenswrapper[4906]: E0227 08:30:38.551755 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:38 crc kubenswrapper[4906]: I0227 08:30:38.551773 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:38 crc kubenswrapper[4906]: E0227 08:30:38.551919 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:40 crc kubenswrapper[4906]: I0227 08:30:40.551515 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:40 crc kubenswrapper[4906]: I0227 08:30:40.551665 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:40 crc kubenswrapper[4906]: I0227 08:30:40.551728 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:40 crc kubenswrapper[4906]: E0227 08:30:40.551674 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:40 crc kubenswrapper[4906]: I0227 08:30:40.551776 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:40 crc kubenswrapper[4906]: E0227 08:30:40.552041 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:40 crc kubenswrapper[4906]: E0227 08:30:40.552142 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:40 crc kubenswrapper[4906]: E0227 08:30:40.552231 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.551579 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.551650 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.551681 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:42 crc kubenswrapper[4906]: E0227 08:30:42.551736 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.551905 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:42 crc kubenswrapper[4906]: E0227 08:30:42.551966 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:42 crc kubenswrapper[4906]: E0227 08:30:42.552024 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:42 crc kubenswrapper[4906]: E0227 08:30:42.552149 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.567418 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,
\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 
secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.580657 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.595071 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.617157 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.629850 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.655099 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.667074 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.679589 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.703660 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.714206 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.725848 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.734539 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.744837 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.755328 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.765954 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: I0227 08:30:42.776107 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:42Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:42 crc kubenswrapper[4906]: E0227 08:30:42.847949 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.460305 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/0.log" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.460356 4906 generic.go:334] "Generic (PLEG): container finished" podID="a961de01-e505-4c80-96a0-333da958a633" containerID="ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26" exitCode=1 Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.460414 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerDied","Data":"ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26"} Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.460937 4906 scope.go:117] "RemoveContainer" containerID="ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.483390 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.514827 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.532324 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.546481 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.551600 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.551621 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.551709 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.551895 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:44 crc kubenswrapper[4906]: E0227 08:30:44.552043 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:44 crc kubenswrapper[4906]: E0227 08:30:44.552204 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:44 crc kubenswrapper[4906]: E0227 08:30:44.552348 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:44 crc kubenswrapper[4906]: E0227 08:30:44.552442 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.559826 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.567039 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.573108 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.589433 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.603560 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.618064 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.632521 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] 
MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.642765 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.654194 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.667465 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.684264 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.700243 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:44 crc kubenswrapper[4906]: I0227 08:30:44.717087 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:44Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.465025 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/0.log" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 
08:30:45.465496 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerStarted","Data":"41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349"} Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.481269 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.503414 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\
":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.519956 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.532049 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.543552 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.552360 4906 scope.go:117] "RemoveContainer" containerID="0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.557568 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dd39b0c-4ecb-4295-8c9a-0918a2d97319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.572237 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.591119 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.603214 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.616598 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.632030 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.644932 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.664116 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.678895 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.691294 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.705068 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:45 crc kubenswrapper[4906]: I0227 08:30:45.717304 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:45Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.027811 4906 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.027842 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.027853 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.027869 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.027905 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:46Z","lastTransitionTime":"2026-02-27T08:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.042610 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.047489 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.047534 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.047551 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.047579 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.047596 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:46Z","lastTransitionTime":"2026-02-27T08:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.066040 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.071531 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.071562 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.071571 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.071592 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.071602 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:46Z","lastTransitionTime":"2026-02-27T08:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.083997 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.087654 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.087689 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.087698 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.087713 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.087723 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:46Z","lastTransitionTime":"2026-02-27T08:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.099133 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.102450 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.102493 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.102512 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.102540 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.102552 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:46Z","lastTransitionTime":"2026-02-27T08:30:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.118708 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.118822 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.472831 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/2.log" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.476203 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.486453 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.492569 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.508072 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-i
dentity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.520704 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\
\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.534403 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.551411 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.552137 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.552272 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.552338 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.552137 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.552477 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.552619 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.552897 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:46 crc kubenswrapper[4906]: E0227 08:30:46.553002 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.565931 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.579984 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.594841 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.617710 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\
\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.634113 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.645572 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.665696 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.679443 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.693039 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dd39b0c-4ecb-4295-8c9a-0918a2d97319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.709542 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.728637 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.747501 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 
genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:46 crc kubenswrapper[4906]: I0227 08:30:46.764738 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.493755 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/3.log" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.494593 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/2.log" Feb 27 08:30:47 crc 
kubenswrapper[4906]: I0227 08:30:47.497728 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.499094 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" exitCode=1 Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.499153 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.499205 4906 scope.go:117] "RemoveContainer" containerID="0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.500313 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:30:47 crc kubenswrapper[4906]: E0227 08:30:47.500569 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.522587 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.537530 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.558316 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.571758 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.593123 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.611154 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.624142 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.641594 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.653268 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.664993 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.675831 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.692955 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0ffee81f0f862efbeb1e9dd3fbc160bfc96b068dff3249e77955461aecb344af\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:20Z\\\",\\\"message\\\":\\\"0.511371 7090 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0227 08:30:20.511396 7090 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0227 08:30:20.511413 7090 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0227 08:30:20.511443 7090 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0227 08:30:20.511534 7090 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0227 08:30:20.511546 7090 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0227 08:30:20.511571 7090 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0227 08:30:20.511580 7090 handler.go:208] Removed *v1.Node event handler 2\\\\nI0227 08:30:20.511588 
7090 handler.go:208] Removed *v1.Node event handler 7\\\\nI0227 08:30:20.511597 7090 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0227 08:30:20.511605 7090 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0227 08:30:20.511613 7090 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0227 08:30:20.512084 7090 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512153 7090 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0227 08:30:20.512708 7090 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:19Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"r-operator]} name:Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.245:443: 10.217.5.245:9192:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {54fbe873-7e6d-475f-a0ad-8dd5f06d850d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0227 08:30:46.448860 7409 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z]\\\\nI0227 
08:30:46.448834\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.706660 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.719385 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.730192 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.744529 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: I0227 08:30:47.762917 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dd39b0c-4ecb-4295-8c9a-0918a2d97319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\"
:\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:47Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:47 crc kubenswrapper[4906]: E0227 08:30:47.849691 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.506248 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/3.log" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.510856 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.518355 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:30:48 crc kubenswrapper[4906]: E0227 08:30:48.518772 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.537357 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.548450 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.551516 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.551604 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.551670 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:48 crc kubenswrapper[4906]: E0227 08:30:48.551809 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.552030 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:48 crc kubenswrapper[4906]: E0227 08:30:48.552174 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:48 crc kubenswrapper[4906]: E0227 08:30:48.552275 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:48 crc kubenswrapper[4906]: E0227 08:30:48.552356 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.564863 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.587770 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"r-operator]} name:Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.245:443: 10.217.5.245:9192:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {54fbe873-7e6d-475f-a0ad-8dd5f06d850d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0227 08:30:46.448860 7409 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy 
controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z]\\\\nI0227 08:30:46.448834\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.602204 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.616550 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.629899 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.641948 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.652398 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dd39b0c-4ecb-4295-8c9a-0918a2d97319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.666600 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.679574 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.692957 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.708423 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.722990 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.737369 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.752345 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:48 crc kubenswrapper[4906]: I0227 08:30:48.773031 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:48Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:50 crc kubenswrapper[4906]: I0227 08:30:50.551758 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:50 crc kubenswrapper[4906]: I0227 08:30:50.551813 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:50 crc kubenswrapper[4906]: E0227 08:30:50.551986 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:50 crc kubenswrapper[4906]: I0227 08:30:50.552089 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:50 crc kubenswrapper[4906]: I0227 08:30:50.552230 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:50 crc kubenswrapper[4906]: E0227 08:30:50.552527 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:50 crc kubenswrapper[4906]: E0227 08:30:50.552837 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:50 crc kubenswrapper[4906]: E0227 08:30:50.553112 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.551614 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.551754 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:52 crc kubenswrapper[4906]: E0227 08:30:52.551922 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.552060 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.552122 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:52 crc kubenswrapper[4906]: E0227 08:30:52.552353 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:52 crc kubenswrapper[4906]: E0227 08:30:52.552488 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:52 crc kubenswrapper[4906]: E0227 08:30:52.552546 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.571042 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.591409 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.612712 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.631447 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.644732 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 
08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.661761 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.681356 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.698848 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.723220 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"r-operator]} name:Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.245:443: 10.217.5.245:9192:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {54fbe873-7e6d-475f-a0ad-8dd5f06d850d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0227 08:30:46.448860 7409 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy 
controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z]\\\\nI0227 08:30:46.448834\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.739862 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.753642 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.767782 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.783091 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dd39b0c-4ecb-4295-8c9a-0918a2d97319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.800721 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 
2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.815320 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.829901 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: I0227 08:30:52.841066 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:52Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:52 crc kubenswrapper[4906]: E0227 08:30:52.850383 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:30:54 crc kubenswrapper[4906]: I0227 08:30:54.551764 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:54 crc kubenswrapper[4906]: I0227 08:30:54.551773 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:54 crc kubenswrapper[4906]: I0227 08:30:54.551818 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:54 crc kubenswrapper[4906]: I0227 08:30:54.551764 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:54 crc kubenswrapper[4906]: E0227 08:30:54.552000 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:54 crc kubenswrapper[4906]: E0227 08:30:54.552283 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:54 crc kubenswrapper[4906]: E0227 08:30:54.552307 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:54 crc kubenswrapper[4906]: E0227 08:30:54.552418 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.238643 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.239142 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.239169 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.239193 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.239255 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:56Z","lastTransitionTime":"2026-02-27T08:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.258755 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.264218 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.264255 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.264264 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.264282 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.264291 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:56Z","lastTransitionTime":"2026-02-27T08:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.283072 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.288276 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.288311 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.288322 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.288341 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.288355 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:56Z","lastTransitionTime":"2026-02-27T08:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.301243 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.306203 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.306275 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.306296 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.306324 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.306350 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:56Z","lastTransitionTime":"2026-02-27T08:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.323480 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.328581 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.328632 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.328644 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.328663 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.328673 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:30:56Z","lastTransitionTime":"2026-02-27T08:30:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.342069 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:56Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:56Z is after 2025-08-24T17:21:41Z" Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.342237 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.552147 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.552247 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.552278 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.552439 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:56 crc kubenswrapper[4906]: I0227 08:30:56.552465 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.552555 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.552700 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:56 crc kubenswrapper[4906]: E0227 08:30:56.552903 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:57 crc kubenswrapper[4906]: E0227 08:30:57.851509 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.457069 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.457318 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.457286068 +0000 UTC m=+220.851687708 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.457375 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.457455 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.457550 4906 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.457580 4906 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.457619 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.457602377 +0000 UTC m=+220.852004027 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.457651 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.457629888 +0000 UTC m=+220.852031528 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.551221 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.551252 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.551413 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.551440 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.551466 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.551605 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.551740 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.551936 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.558265 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.558352 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:30:58 crc kubenswrapper[4906]: I0227 08:30:58.558430 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558550 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558570 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558597 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558608 4906 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558621 4906 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558631 4906 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] 
Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558716 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.558687574 +0000 UTC m=+220.953089214 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558727 4906 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558753 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.558736745 +0000 UTC m=+220.953138385 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 27 08:30:58 crc kubenswrapper[4906]: E0227 08:30:58.558924 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs podName:9bb2ded7-f8fe-4978-81cd-08cafe0fe124 nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.558847328 +0000 UTC m=+220.953248978 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs") pod "network-metrics-daemon-6rvgh" (UID: "9bb2ded7-f8fe-4978-81cd-08cafe0fe124") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 27 08:31:00 crc kubenswrapper[4906]: I0227 08:31:00.551954 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:00 crc kubenswrapper[4906]: I0227 08:31:00.551949 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:00 crc kubenswrapper[4906]: I0227 08:31:00.552107 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:00 crc kubenswrapper[4906]: I0227 08:31:00.552131 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:00 crc kubenswrapper[4906]: E0227 08:31:00.552215 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:00 crc kubenswrapper[4906]: E0227 08:31:00.552361 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:00 crc kubenswrapper[4906]: E0227 08:31:00.552488 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:00 crc kubenswrapper[4906]: E0227 08:31:00.552688 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:01 crc kubenswrapper[4906]: I0227 08:31:01.552361 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:31:01 crc kubenswrapper[4906]: E0227 08:31:01.552559 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.551117 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.551136 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.551196 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:02 crc kubenswrapper[4906]: E0227 08:31:02.551279 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.551313 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:02 crc kubenswrapper[4906]: E0227 08:31:02.551391 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:02 crc kubenswrapper[4906]: E0227 08:31:02.551489 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:02 crc kubenswrapper[4906]: E0227 08:31:02.551619 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.570218 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.586635 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.610869 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.626270 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 
08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.645836 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.663050 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.687691 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"r-operator]} name:Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.245:443: 10.217.5.245:9192:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {54fbe873-7e6d-475f-a0ad-8dd5f06d850d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0227 08:30:46.448860 7409 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy 
controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z]\\\\nI0227 08:30:46.448834\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.708636 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.725119 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.739520 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.754624 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.770220 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dd39b0c-4ecb-4295-8c9a-0918a2d97319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.792148 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 
2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.808601 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.825905 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.842245 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/o
penshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 
08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:02 crc kubenswrapper[4906]: E0227 08:31:02.852521 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:02 crc kubenswrapper[4906]: I0227 08:31:02.856539 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:02Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:04 crc kubenswrapper[4906]: I0227 08:31:04.551542 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:04 crc kubenswrapper[4906]: I0227 08:31:04.551612 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:04 crc kubenswrapper[4906]: I0227 08:31:04.551558 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:04 crc kubenswrapper[4906]: E0227 08:31:04.551736 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:04 crc kubenswrapper[4906]: I0227 08:31:04.551777 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:04 crc kubenswrapper[4906]: E0227 08:31:04.551848 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:04 crc kubenswrapper[4906]: E0227 08:31:04.551961 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:04 crc kubenswrapper[4906]: E0227 08:31:04.552286 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.551536 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.551561 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.551558 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.551536 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.551767 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.551925 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.551963 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.552015 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.655800 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.655858 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.655870 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.655914 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.655928 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:06Z","lastTransitionTime":"2026-02-27T08:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.669769 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.673355 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.673395 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.673407 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.673424 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.673436 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:06Z","lastTransitionTime":"2026-02-27T08:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.691043 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.694615 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.694645 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.694654 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.694666 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.694674 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:06Z","lastTransitionTime":"2026-02-27T08:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.707386 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.710847 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.710871 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.710899 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.710920 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.710953 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:06Z","lastTransitionTime":"2026-02-27T08:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.724472 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.728427 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.728468 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.728476 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.728491 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:06 crc kubenswrapper[4906]: I0227 08:31:06.728500 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:06Z","lastTransitionTime":"2026-02-27T08:31:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.742519 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:06Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:06 crc kubenswrapper[4906]: E0227 08:31:06.742694 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:31:07 crc kubenswrapper[4906]: E0227 08:31:07.854697 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:08 crc kubenswrapper[4906]: I0227 08:31:08.551606 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:08 crc kubenswrapper[4906]: I0227 08:31:08.551666 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:08 crc kubenswrapper[4906]: I0227 08:31:08.551763 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:08 crc kubenswrapper[4906]: I0227 08:31:08.551725 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:08 crc kubenswrapper[4906]: E0227 08:31:08.552313 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:08 crc kubenswrapper[4906]: E0227 08:31:08.552374 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:08 crc kubenswrapper[4906]: E0227 08:31:08.552423 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:08 crc kubenswrapper[4906]: E0227 08:31:08.552712 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:08 crc kubenswrapper[4906]: I0227 08:31:08.570604 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 27 08:31:10 crc kubenswrapper[4906]: I0227 08:31:10.551087 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:10 crc kubenswrapper[4906]: I0227 08:31:10.551223 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:10 crc kubenswrapper[4906]: E0227 08:31:10.551328 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:10 crc kubenswrapper[4906]: I0227 08:31:10.551355 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:10 crc kubenswrapper[4906]: I0227 08:31:10.551435 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:10 crc kubenswrapper[4906]: E0227 08:31:10.551594 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:10 crc kubenswrapper[4906]: E0227 08:31:10.551678 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:10 crc kubenswrapper[4906]: E0227 08:31:10.551902 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:11 crc kubenswrapper[4906]: I0227 08:31:11.565073 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.551719 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:12 crc kubenswrapper[4906]: E0227 08:31:12.551930 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.552202 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.552305 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.552225 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:12 crc kubenswrapper[4906]: E0227 08:31:12.552428 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:12 crc kubenswrapper[4906]: E0227 08:31:12.552575 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:12 crc kubenswrapper[4906]: E0227 08:31:12.552675 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.568958 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-24rf6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4b021f0d-0615-479d-ab6c-6736222572f1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bce2fbaa3258bf0724f2e5a4509268ce664e9bceec4973861c9568c22bbe7faf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ltkxm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-24rf6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.587591 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-wxkxk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a232e3c-1fa4-4163-bb31-bd2f9891f259\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c217467eac6dd4e0932205e3b5c8097f185a14fb3dfe3c0006fccb017e2753a1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tpmgx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-wxkxk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.609607 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"117fa63b-d143-4455-afa6-4fb6807a6ca0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f8ee8ebc8754ebd9809529017b18da429ecb0f3dc78f2a04aa8a51cbf6fcc0b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1c51e378716601f9f31859a63dba132d3f3fe7363b7c55daed95e383a021f5f3\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:28:53Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0227 08:28:25.634767 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0227 08:28:25.637698 1 observer_polling.go:159] Starting file observer\\\\nI0227 08:28:25.680142 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0227 08:28:25.684637 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0227 08:28:53.341127 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0227 08:28:53.341316 1 cmd.go:179] failed checking apiserver connectivity: Get \\\\\\\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/openshift-kube-controller-manager/leases/cluster-policy-controller-lock\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:28:52Z is after 2026-02-23T05:33:13Z\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3767fd1cf2cf180596e480e92ff8b11ad3be31fcc73fc13b01346423b90b7495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f1d50669b6d531679c57566e0c22f499125c94662f2d4e51edbb4a208f5a3f34\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-clust
er-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.633282 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"message\\\":\\\"++ K8S_NODE=\\\\n++ [[ -n '' ]]\\\\n++ northd_pidfile=/var/run/ovn/ovn-northd.pid\\\\n++ controller_pidfile=/var/run/ovn/ovn-controller.pid\\\\n++ controller_logfile=/var/log/ovn/acl-audit-log.log\\\\n++ vswitch_dbsock=/var/run/openvswitch/db.sock\\\\n++ nbdb_pidfile=/var/run/ovn/ovnnb_db.pid\\\\n++ nbdb_sock=/var/run/ovn/ovnnb_db.sock\\\\n++ nbdb_ctl=/var/run/ovn/ovnnb_db.ctl\\\\n++ sbdb_pidfile=/var/run/ovn/ovnsb_db.pid\\\\n++ sbdb_sock=/var/run/ovn/ovnsb_db.sock\\\\n++ sbdb_ctl=/var/run/ovn/ovnsb_db.ctl\\\\n+ start-audit-log-rotation\\\\n+ MAXFILESIZE=50000000\\\\n+ MAXLOGFILES=5\\\\n++ dirname /var/log/ovn/acl-audit-log.log\\\\n+ LOGDIR=/var/log/ovn\\\\n+ local retries=0\\\\n+ [[ 30 -gt 0 ]]\\\\n+ (( retries += 1 ))\\\\n++ cat /var/run/ovn/ovn-controller.pid\\\\ncat: /var/run/ovn/ovn-controller.pid: No such file or directory\\\\n+ 
CONTROLLERPID=\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:02Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:46Z\\\",\\\"message\\\":\\\"r-operator]} name:Service_openshift-machine-api/cluster-autoscaler-operator_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.245:443: 10.217.5.245:9192:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {54fbe873-7e6d-475f-a0ad-8dd5f06d850d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0227 08:30:46.448860 7409 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy 
controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:30:46Z is after 2025-08-24T17:21:41Z]\\\\nI0227 08:30:46.448834\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l5jtf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-lck5x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.650589 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-6nxxh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a961de01-e505-4c80-96a0-333da958a633\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-27T08:30:43Z\\\",\\\"message\\\":\\\"2026-02-27T08:29:57+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7\\\\n2026-02-27T08:29:57+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_80f7ef0c-7919-495d-ba74-c18db36ce5c7 to /host/opt/cni/bin/\\\\n2026-02-27T08:29:58Z [verbose] multus-daemon started\\\\n2026-02-27T08:29:58Z [verbose] Readiness Indicator file check\\\\n2026-02-27T08:30:43Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-f2fwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-6nxxh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.666441 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.679741 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.690476 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3f99e084f7e135e4acc346467157884c932bec3bb42211783d4399c61cc2d324\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.700975 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a702b787-cadb-48e8-a5db-ff2184559305\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8ac0b72f2c937a25ee718957cbdccae98f76d3f6684e69bb1b89062040fdd63\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f23b21d14cdf03fbee2edf22368e005836dcee32f0eced46ee2e41e977a96335\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f23b21d14cdf03fbee2edf22368e005836dcee32f0eced46ee2e41e977a96335\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.711561 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6dd39b0c-4ecb-4295-8c9a-0918a2d97319\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3065ac6de3ade07cd15c272b286223985df3c354018634e1aef5095952735dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3c64bff4f3048eb83cd1d0db774c6448950a550b5074eaf9186244cea8344879\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3b7a7e5b59d69cc97b3ad6f3d339eb5fe1edcfe4b9c46af545f363773fc6b322\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://aad657adbeab12281a0a6c5286d238cc8f9921f9674fcbfe0e7370c53ee9f389\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.724964 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8122b6f931b63a7b8568d8337760b1f3e69538861a3785189f483086cc69fd84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 
2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.736593 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"165ec89d-3872-45c2-9529-c3c8430d3798\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static
-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-27T08:29:27Z\\\",\\\"message\\\":\\\"le observer\\\\nW0227 08:29:27.405767 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0227 08:29:27.406064 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0227 08:29:27.407374 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1449978071/tls.crt::/tmp/serving-cert-1449978071/tls.key\\\\\\\"\\\\nI0227 08:29:27.650503 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0227 08:29:27.663137 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0227 08:29:27.663166 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0227 08:29:27.663191 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0227 08:29:27.663199 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0227 08:29:27.671268 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0227 08:29:27.671351 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671386 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0227 08:29:27.671417 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0227 08:29:27.671457 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0227 08:29:27.671491 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0227 08:29:27.671531 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0227 08:29:27.671296 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0227 08:29:27.673784 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:13Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.748259 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7fgsd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-6rvgh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.760523 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"aac6a472-a779-4ece-a897-c062a410c555\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a3bcdb93cdc93ccfe6ebe595d72ae943144f34eaef132263cbcf8aaac4559464\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6337aebfae16f1625857bca2b5a1a039affdfdb278cf6460c2411ecaea714b59\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hfpcq\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-mjwsd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 
08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.773209 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab1951bd61adbc44cfd9a59f9b20db991a197da9f17cc85dc46c8576c2e18db5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8827e5160fe944bd374c2f3478d3e10f71dd93694d2d52b18de5a360de1a065c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.783132 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.801216 4906 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"491056ab-9a2f-4fa7-972f-cf5a3cfa1376\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://737a44f829a3425d302c5cae15a8f6122fb3d0af83df591f77f2621228ff1421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f337ed2892c2b7d76961dd9ffddf9c734b56bd4c33b5092d4130aaf4dac438a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e42a7d168646c14021320a60fecff660555424fa0301e8b94ec2a33a2fa264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4171fc6975c6694f0a11072977703b9835265735638ff89800dbe7b6c0b8b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29eff573776c7b8b5e81766ab5f94d3e624e79bb46c99974bca712ae636f6cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6dfcfacc19fca2a2d3163232cf910127d5fc217f2172b435d643985340464f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6dfcfacc19fca2a2d3163232cf910127d5fc217f2172b435d643985340464f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be9e110fce50dcce9faa851cc41868baceb592e8a0e8cdc2363bba73308d8c6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be9e110fce50dcce9faa851cc41868baceb592e8a0e8cdc2363bba73308d8c6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://b77f4f0e00b2d7289c5e83f9ae56767316b65f0ae2e774d7c709cf423ba9b437\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b77f4f0e00b2d7289c5e83f9ae56767316b65f0ae2e774d7c709cf423ba9b437\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.812794 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: I0227 08:31:12.826367 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f741b698-d9d4-4e22-800a-91e67ca6e260\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:30:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1dc718b31d31ab2d2a6b9527982b42882f32cf5c72dae2582380fd9d2457b567\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:30:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c22151d21335b00d3cbf4d6b7303d3f643399ea84a19751996130912ae524c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://afa12e48265294a613bf1dfb91f3e337bcdb212fcbf28c53c990a5613f953923\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://41ab541dab882d3535ddb47aaff7f23b56cd27bd856372826ff564d8d4f2c1c9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:58Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://64c7be4993dcda76623692daec118e135977e56c400c68a09d721ebcf0ff58d4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:29:59Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:29:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e2427fa5365a6dca541d53785e3473841c0b98adb0691b4ae7a40c9dad6e78a5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:02Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9357453faef70e5c8857f8ad27b20d1992b86be08c54758a796a443f0a99ace1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:30:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:30:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-slkv2\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-9cqzh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:12Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:12 crc kubenswrapper[4906]: E0227 08:31:12.855275 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:14 crc kubenswrapper[4906]: I0227 08:31:14.552223 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:14 crc kubenswrapper[4906]: I0227 08:31:14.552296 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:14 crc kubenswrapper[4906]: I0227 08:31:14.552400 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:14 crc kubenswrapper[4906]: I0227 08:31:14.552590 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:14 crc kubenswrapper[4906]: E0227 08:31:14.552741 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:14 crc kubenswrapper[4906]: E0227 08:31:14.553063 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:14 crc kubenswrapper[4906]: E0227 08:31:14.553078 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:14 crc kubenswrapper[4906]: E0227 08:31:14.553137 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:14 crc kubenswrapper[4906]: I0227 08:31:14.553656 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:31:14 crc kubenswrapper[4906]: E0227 08:31:14.553833 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:31:16 crc kubenswrapper[4906]: I0227 08:31:16.551158 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:16 crc kubenswrapper[4906]: I0227 08:31:16.551192 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:16 crc kubenswrapper[4906]: I0227 08:31:16.551246 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:16 crc kubenswrapper[4906]: I0227 08:31:16.551572 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:16 crc kubenswrapper[4906]: E0227 08:31:16.551556 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:16 crc kubenswrapper[4906]: E0227 08:31:16.551943 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:16 crc kubenswrapper[4906]: E0227 08:31:16.552046 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:16 crc kubenswrapper[4906]: E0227 08:31:16.552345 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.058316 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.058367 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.058380 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.058396 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.058408 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:17Z","lastTransitionTime":"2026-02-27T08:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:17 crc kubenswrapper[4906]: E0227 08:31:17.072326 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeByt
es\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:17Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.076327 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.076457 4906 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.076499 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.076531 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.076561 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:17Z","lastTransitionTime":"2026-02-27T08:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:17 crc kubenswrapper[4906]: E0227 08:31:17.091413 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:17Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.095695 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.095749 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.095766 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.095793 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.095812 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:17Z","lastTransitionTime":"2026-02-27T08:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:17 crc kubenswrapper[4906]: E0227 08:31:17.108697 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:17Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.112391 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.112457 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.112482 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.112511 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.112533 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:17Z","lastTransitionTime":"2026-02-27T08:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:17 crc kubenswrapper[4906]: E0227 08:31:17.126095 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:17Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.129866 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.129944 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.129959 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.129978 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:17 crc kubenswrapper[4906]: I0227 08:31:17.129996 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:17Z","lastTransitionTime":"2026-02-27T08:31:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:17 crc kubenswrapper[4906]: E0227 08:31:17.147483 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404548Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865348Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-27T08:31:17Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"b1730d04-932a-4b41-89e7-49b2fa07b78e\\\",\\\"systemUUID\\\":\\\"6a844eb5-aea5-4505-a424-d96dc4bc1329\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:17Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:17 crc kubenswrapper[4906]: E0227 08:31:17.147602 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:31:17 crc kubenswrapper[4906]: E0227 08:31:17.856935 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:18 crc kubenswrapper[4906]: I0227 08:31:18.551850 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:18 crc kubenswrapper[4906]: I0227 08:31:18.551928 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:18 crc kubenswrapper[4906]: I0227 08:31:18.552081 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:18 crc kubenswrapper[4906]: I0227 08:31:18.552156 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:18 crc kubenswrapper[4906]: E0227 08:31:18.552146 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:18 crc kubenswrapper[4906]: E0227 08:31:18.552231 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:18 crc kubenswrapper[4906]: E0227 08:31:18.552398 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:18 crc kubenswrapper[4906]: E0227 08:31:18.552560 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:20 crc kubenswrapper[4906]: I0227 08:31:20.551965 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:20 crc kubenswrapper[4906]: I0227 08:31:20.551999 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:20 crc kubenswrapper[4906]: I0227 08:31:20.551969 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:20 crc kubenswrapper[4906]: E0227 08:31:20.552134 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:20 crc kubenswrapper[4906]: E0227 08:31:20.552293 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:20 crc kubenswrapper[4906]: I0227 08:31:20.552286 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:20 crc kubenswrapper[4906]: E0227 08:31:20.552521 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:20 crc kubenswrapper[4906]: E0227 08:31:20.552756 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.551678 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.551772 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.551827 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.551857 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:22 crc kubenswrapper[4906]: E0227 08:31:22.551970 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:22 crc kubenswrapper[4906]: E0227 08:31:22.552232 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:22 crc kubenswrapper[4906]: E0227 08:31:22.552634 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:22 crc kubenswrapper[4906]: E0227 08:31:22.552678 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.573148 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8e7fe757e18bb51e87d92c99edb470c73aff5a796fe864beb7da261af37dcf2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:29:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-66chc\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:29:54Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2s5wg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.605817 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"491056ab-9a2f-4fa7-972f-cf5a3cfa1376\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-27T08:28:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://737a44f829a3425d302c5cae15a8f6122fb3d0af83df591f77f2621228ff1421\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f337ed2892c2b7d76961dd9ffddf9c734b56bd4c33b5092d4130aaf4dac438a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e9e42a7d168646c14021320a60fecff660555424fa0301e8b94ec2a33a2fa264\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4171fc6975c6694f0a11072977703b98352657
35638ff89800dbe7b6c0b8b45\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://29eff573776c7b8b5e81766ab5f94d3e624e79bb46c99974bca712ae636f6cb5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-27T08:28:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f6dfcfacc19fca2a2d3163232cf910127d5fc217f2172b435d643985340464f2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6dfcfacc19fca2a2d3163232cf910127d5fc217f2172b435d643985340464f2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://be9e110fce50dcce9faa851cc41868baceb592e8a0e8cdc2363bba73308d8c6f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be9e110fce50dcce9faa851cc41868baceb592e8a0e8cdc2363bba73308d8c6f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:25Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://b77f4f0e00b2d7289c5e83f9ae56767316b65f0ae2e774d7c709cf423ba9b437\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b77f4f0e00b2d7289c5e83f9ae56767316b65f0ae2e774d7c709cf423ba9b437\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-27T08:28:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-27T08:28:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-27T08:28:22Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.626439 4906 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-27T08:29:54Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-27T08:31:22Z is after 2025-08-24T17:21:41Z" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.671567 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-9cqzh" podStartSLOduration=133.671491801 podStartE2EDuration="2m13.671491801s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.670285776 +0000 UTC m=+181.064687386" watchObservedRunningTime="2026-02-27 08:31:22.671491801 +0000 UTC m=+181.065893451" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.686610 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-mjwsd" podStartSLOduration=133.686579712 podStartE2EDuration="2m13.686579712s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.686425498 +0000 UTC m=+181.080827148" watchObservedRunningTime="2026-02-27 08:31:22.686579712 +0000 UTC m=+181.080981352" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.723487 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=66.723469771 podStartE2EDuration="1m6.723469771s" podCreationTimestamp="2026-02-27 08:30:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.723228884 +0000 UTC m=+181.117630504" watchObservedRunningTime="2026-02-27 08:31:22.723469771 +0000 UTC m=+181.117871371" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.788908 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-24rf6" podStartSLOduration=133.788870704 podStartE2EDuration="2m13.788870704s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.788755071 +0000 UTC m=+181.183156691" watchObservedRunningTime="2026-02-27 08:31:22.788870704 +0000 UTC m=+181.183272324" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.789190 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-multus/multus-6nxxh" podStartSLOduration=133.789185064 podStartE2EDuration="2m13.789185064s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.770966501 +0000 UTC m=+181.165368131" watchObservedRunningTime="2026-02-27 08:31:22.789185064 +0000 UTC m=+181.183586674" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.810111 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-wxkxk" podStartSLOduration=133.810094875 podStartE2EDuration="2m13.810094875s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.809557899 +0000 UTC m=+181.203959549" watchObservedRunningTime="2026-02-27 08:31:22.810094875 +0000 UTC m=+181.204496485" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.830848 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=11.830829442 podStartE2EDuration="11.830829442s" podCreationTimestamp="2026-02-27 08:31:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.830504342 +0000 UTC m=+181.224905962" watchObservedRunningTime="2026-02-27 08:31:22.830829442 +0000 UTC m=+181.225231052" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.842438 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=38.842421181 podStartE2EDuration="38.842421181s" podCreationTimestamp="2026-02-27 08:30:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.842114602 +0000 UTC m=+181.236516222" watchObservedRunningTime="2026-02-27 08:31:22.842421181 +0000 UTC m=+181.236822791" Feb 27 08:31:22 crc kubenswrapper[4906]: E0227 08:31:22.857358 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:22 crc kubenswrapper[4906]: I0227 08:31:22.916187 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=83.916165928 podStartE2EDuration="1m23.916165928s" podCreationTimestamp="2026-02-27 08:29:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:22.906086553 +0000 UTC m=+181.300488163" watchObservedRunningTime="2026-02-27 08:31:22.916165928 +0000 UTC m=+181.310567538" Feb 27 08:31:24 crc kubenswrapper[4906]: I0227 08:31:24.551667 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:24 crc kubenswrapper[4906]: E0227 08:31:24.554738 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:24 crc kubenswrapper[4906]: I0227 08:31:24.551988 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:24 crc kubenswrapper[4906]: E0227 08:31:24.554868 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:24 crc kubenswrapper[4906]: I0227 08:31:24.551995 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:24 crc kubenswrapper[4906]: E0227 08:31:24.554953 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:24 crc kubenswrapper[4906]: I0227 08:31:24.551764 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:24 crc kubenswrapper[4906]: E0227 08:31:24.555027 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:25 crc kubenswrapper[4906]: I0227 08:31:25.553151 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:31:25 crc kubenswrapper[4906]: E0227 08:31:25.553458 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-lck5x_openshift-ovn-kubernetes(0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2)\"" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" Feb 27 08:31:26 crc kubenswrapper[4906]: I0227 08:31:26.552243 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:26 crc kubenswrapper[4906]: I0227 08:31:26.552307 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:26 crc kubenswrapper[4906]: E0227 08:31:26.552403 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:26 crc kubenswrapper[4906]: I0227 08:31:26.552243 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:26 crc kubenswrapper[4906]: I0227 08:31:26.552546 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:26 crc kubenswrapper[4906]: E0227 08:31:26.552770 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:26 crc kubenswrapper[4906]: E0227 08:31:26.552855 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:26 crc kubenswrapper[4906]: E0227 08:31:26.553029 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.156141 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.156187 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.156196 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.156209 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.156220 4906 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-27T08:31:27Z","lastTransitionTime":"2026-02-27T08:31:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.219639 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8"] Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.220299 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.222843 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.223486 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.223541 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.224101 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.258873 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=19.258841969 podStartE2EDuration="19.258841969s" podCreationTimestamp="2026-02-27 08:31:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:27.257234552 +0000 UTC m=+185.651636202" watchObservedRunningTime="2026-02-27 08:31:27.258841969 +0000 UTC m=+185.653243629" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.291266 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3fbc0235-4c1e-4d6a-a69d-929df70ef259-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.291344 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fbc0235-4c1e-4d6a-a69d-929df70ef259-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.291382 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3fbc0235-4c1e-4d6a-a69d-929df70ef259-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.291433 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fbc0235-4c1e-4d6a-a69d-929df70ef259-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.291630 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fbc0235-4c1e-4d6a-a69d-929df70ef259-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.294537 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podStartSLOduration=138.294513572 podStartE2EDuration="2m18.294513572s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:27.292648918 +0000 UTC m=+185.687050568" watchObservedRunningTime="2026-02-27 08:31:27.294513572 +0000 UTC m=+185.688915193" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.392368 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fbc0235-4c1e-4d6a-a69d-929df70ef259-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.392423 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fbc0235-4c1e-4d6a-a69d-929df70ef259-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.392474 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3fbc0235-4c1e-4d6a-a69d-929df70ef259-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.392500 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fbc0235-4c1e-4d6a-a69d-929df70ef259-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.392521 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3fbc0235-4c1e-4d6a-a69d-929df70ef259-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.392596 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3fbc0235-4c1e-4d6a-a69d-929df70ef259-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.393512 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3fbc0235-4c1e-4d6a-a69d-929df70ef259-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.393839 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3fbc0235-4c1e-4d6a-a69d-929df70ef259-service-ca\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.399070 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fbc0235-4c1e-4d6a-a69d-929df70ef259-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.411183 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3fbc0235-4c1e-4d6a-a69d-929df70ef259-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-7k9c8\" (UID: \"3fbc0235-4c1e-4d6a-a69d-929df70ef259\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.543140 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.545984 4906 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.558468 4906 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 27 08:31:27 crc kubenswrapper[4906]: W0227 08:31:27.567346 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fbc0235_4c1e_4d6a_a69d_929df70ef259.slice/crio-9b42ca70e4f4ee1a66aa9428759c2852e743acba1e377e7d2a5a566b56b3146e WatchSource:0}: Error finding container 9b42ca70e4f4ee1a66aa9428759c2852e743acba1e377e7d2a5a566b56b3146e: Status 404 returned error can't find the container with id 9b42ca70e4f4ee1a66aa9428759c2852e743acba1e377e7d2a5a566b56b3146e Feb 27 08:31:27 crc kubenswrapper[4906]: I0227 08:31:27.656185 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" event={"ID":"3fbc0235-4c1e-4d6a-a69d-929df70ef259","Type":"ContainerStarted","Data":"9b42ca70e4f4ee1a66aa9428759c2852e743acba1e377e7d2a5a566b56b3146e"} Feb 27 08:31:27 crc kubenswrapper[4906]: E0227 08:31:27.859228 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:28 crc kubenswrapper[4906]: I0227 08:31:28.551342 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:28 crc kubenswrapper[4906]: I0227 08:31:28.551390 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:28 crc kubenswrapper[4906]: I0227 08:31:28.551375 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:28 crc kubenswrapper[4906]: I0227 08:31:28.551342 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:28 crc kubenswrapper[4906]: E0227 08:31:28.551557 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:28 crc kubenswrapper[4906]: E0227 08:31:28.552124 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:28 crc kubenswrapper[4906]: E0227 08:31:28.552126 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:28 crc kubenswrapper[4906]: E0227 08:31:28.552186 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:28 crc kubenswrapper[4906]: I0227 08:31:28.665265 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" event={"ID":"3fbc0235-4c1e-4d6a-a69d-929df70ef259","Type":"ContainerStarted","Data":"24aa981e65b144b67b325aa9d51dd62852bfb2453395b7b3cf9eccdc06cf2f61"} Feb 27 08:31:28 crc kubenswrapper[4906]: I0227 08:31:28.686951 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-7k9c8" podStartSLOduration=139.686928019 podStartE2EDuration="2m19.686928019s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:28.686187167 +0000 UTC m=+187.080588787" watchObservedRunningTime="2026-02-27 08:31:28.686928019 +0000 UTC m=+187.081329649" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.552023 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.552077 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.552052 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.552024 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:30 crc kubenswrapper[4906]: E0227 08:31:30.552213 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:30 crc kubenswrapper[4906]: E0227 08:31:30.552344 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:30 crc kubenswrapper[4906]: E0227 08:31:30.552451 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:30 crc kubenswrapper[4906]: E0227 08:31:30.552560 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.675497 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/1.log" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.676470 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/0.log" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.676584 4906 generic.go:334] "Generic (PLEG): container finished" podID="a961de01-e505-4c80-96a0-333da958a633" containerID="41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349" exitCode=1 Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.676664 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerDied","Data":"41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349"} Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.676764 4906 scope.go:117] "RemoveContainer" containerID="ec7ee5bade13a528668d4e2c0563c8cb86c82e15e3db0d2b0ad8e011c8c0bb26" Feb 27 08:31:30 crc kubenswrapper[4906]: I0227 08:31:30.677574 4906 scope.go:117] "RemoveContainer" containerID="41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349" Feb 27 08:31:30 crc kubenswrapper[4906]: E0227 08:31:30.678250 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-6nxxh_openshift-multus(a961de01-e505-4c80-96a0-333da958a633)\"" pod="openshift-multus/multus-6nxxh" podUID="a961de01-e505-4c80-96a0-333da958a633" Feb 27 08:31:31 crc kubenswrapper[4906]: I0227 08:31:31.681662 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/1.log" Feb 27 08:31:32 crc kubenswrapper[4906]: I0227 
08:31:32.552056 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:32 crc kubenswrapper[4906]: I0227 08:31:32.552097 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:32 crc kubenswrapper[4906]: I0227 08:31:32.552158 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:32 crc kubenswrapper[4906]: I0227 08:31:32.553290 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:32 crc kubenswrapper[4906]: E0227 08:31:32.553289 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:32 crc kubenswrapper[4906]: E0227 08:31:32.553425 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:32 crc kubenswrapper[4906]: E0227 08:31:32.553550 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:32 crc kubenswrapper[4906]: E0227 08:31:32.553617 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:32 crc kubenswrapper[4906]: E0227 08:31:32.859916 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:34 crc kubenswrapper[4906]: I0227 08:31:34.551655 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:34 crc kubenswrapper[4906]: I0227 08:31:34.551668 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:34 crc kubenswrapper[4906]: E0227 08:31:34.552612 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:34 crc kubenswrapper[4906]: E0227 08:31:34.552833 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:34 crc kubenswrapper[4906]: I0227 08:31:34.551859 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:34 crc kubenswrapper[4906]: I0227 08:31:34.553146 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:34 crc kubenswrapper[4906]: E0227 08:31:34.553311 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:34 crc kubenswrapper[4906]: E0227 08:31:34.553341 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:36 crc kubenswrapper[4906]: I0227 08:31:36.551752 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:36 crc kubenswrapper[4906]: I0227 08:31:36.551756 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:36 crc kubenswrapper[4906]: I0227 08:31:36.551757 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:36 crc kubenswrapper[4906]: E0227 08:31:36.552902 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:36 crc kubenswrapper[4906]: E0227 08:31:36.552669 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:36 crc kubenswrapper[4906]: I0227 08:31:36.551927 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:36 crc kubenswrapper[4906]: E0227 08:31:36.552960 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:36 crc kubenswrapper[4906]: E0227 08:31:36.553085 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:37 crc kubenswrapper[4906]: I0227 08:31:37.552782 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:31:37 crc kubenswrapper[4906]: E0227 08:31:37.861899 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.514830 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-6rvgh"] Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.515042 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:38 crc kubenswrapper[4906]: E0227 08:31:38.515157 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.551814 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.551945 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:38 crc kubenswrapper[4906]: E0227 08:31:38.551971 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.552037 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:38 crc kubenswrapper[4906]: E0227 08:31:38.552119 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:38 crc kubenswrapper[4906]: E0227 08:31:38.552246 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.708351 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/3.log" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.710390 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.710988 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerStarted","Data":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.711537 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:31:38 crc kubenswrapper[4906]: I0227 08:31:38.738498 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podStartSLOduration=149.73847356 podStartE2EDuration="2m29.73847356s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:31:38.736973646 +0000 UTC m=+197.131375266" watchObservedRunningTime="2026-02-27 08:31:38.73847356 +0000 UTC m=+197.132875180" Feb 27 08:31:39 crc kubenswrapper[4906]: I0227 08:31:39.552160 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:39 crc kubenswrapper[4906]: E0227 08:31:39.552327 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:40 crc kubenswrapper[4906]: I0227 08:31:40.551660 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:40 crc kubenswrapper[4906]: E0227 08:31:40.552242 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:40 crc kubenswrapper[4906]: I0227 08:31:40.551973 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:40 crc kubenswrapper[4906]: E0227 08:31:40.552343 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:40 crc kubenswrapper[4906]: I0227 08:31:40.551805 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:40 crc kubenswrapper[4906]: E0227 08:31:40.552426 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:41 crc kubenswrapper[4906]: I0227 08:31:41.551998 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:41 crc kubenswrapper[4906]: E0227 08:31:41.552402 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:42 crc kubenswrapper[4906]: I0227 08:31:42.552144 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:42 crc kubenswrapper[4906]: I0227 08:31:42.552197 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:42 crc kubenswrapper[4906]: E0227 08:31:42.552374 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:42 crc kubenswrapper[4906]: I0227 08:31:42.552402 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:42 crc kubenswrapper[4906]: E0227 08:31:42.553871 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:42 crc kubenswrapper[4906]: E0227 08:31:42.554090 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:42 crc kubenswrapper[4906]: E0227 08:31:42.862868 4906 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 27 08:31:43 crc kubenswrapper[4906]: I0227 08:31:43.551436 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:43 crc kubenswrapper[4906]: I0227 08:31:43.552237 4906 scope.go:117] "RemoveContainer" containerID="41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349" Feb 27 08:31:43 crc kubenswrapper[4906]: E0227 08:31:43.552149 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:43 crc kubenswrapper[4906]: I0227 08:31:43.737615 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/1.log" Feb 27 08:31:43 crc kubenswrapper[4906]: I0227 08:31:43.738593 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerStarted","Data":"97b39484f9fe72c1088867368903db2cbdc36bc29698fe7c9c901230d5e6bf42"} Feb 27 08:31:44 crc kubenswrapper[4906]: I0227 08:31:44.552137 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:44 crc kubenswrapper[4906]: I0227 08:31:44.552225 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:44 crc kubenswrapper[4906]: E0227 08:31:44.552329 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:44 crc kubenswrapper[4906]: I0227 08:31:44.552441 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:44 crc kubenswrapper[4906]: E0227 08:31:44.552644 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:44 crc kubenswrapper[4906]: E0227 08:31:44.552742 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:45 crc kubenswrapper[4906]: I0227 08:31:45.551544 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:45 crc kubenswrapper[4906]: E0227 08:31:45.551758 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:46 crc kubenswrapper[4906]: I0227 08:31:46.551167 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:46 crc kubenswrapper[4906]: I0227 08:31:46.551201 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:46 crc kubenswrapper[4906]: E0227 08:31:46.551323 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 27 08:31:46 crc kubenswrapper[4906]: I0227 08:31:46.551397 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:46 crc kubenswrapper[4906]: E0227 08:31:46.551565 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 27 08:31:46 crc kubenswrapper[4906]: E0227 08:31:46.551623 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 27 08:31:47 crc kubenswrapper[4906]: I0227 08:31:47.551718 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:47 crc kubenswrapper[4906]: E0227 08:31:47.552007 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-6rvgh" podUID="9bb2ded7-f8fe-4978-81cd-08cafe0fe124" Feb 27 08:31:48 crc kubenswrapper[4906]: I0227 08:31:48.551490 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:31:48 crc kubenswrapper[4906]: I0227 08:31:48.551661 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:31:48 crc kubenswrapper[4906]: I0227 08:31:48.552200 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:31:48 crc kubenswrapper[4906]: I0227 08:31:48.555124 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 27 08:31:48 crc kubenswrapper[4906]: I0227 08:31:48.556595 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 27 08:31:48 crc kubenswrapper[4906]: I0227 08:31:48.556631 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 27 08:31:48 crc kubenswrapper[4906]: I0227 08:31:48.557481 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 27 08:31:49 crc kubenswrapper[4906]: I0227 08:31:49.552219 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:31:49 crc kubenswrapper[4906]: I0227 08:31:49.555598 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 27 08:31:49 crc kubenswrapper[4906]: I0227 08:31:49.555858 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 27 08:31:54 crc kubenswrapper[4906]: I0227 08:31:54.819770 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.017756 4906 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.063206 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zl4pc"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.063867 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.068368 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.068604 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.068420 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.068987 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.069404 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.071958 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.072223 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.072693 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.072948 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.073275 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.076180 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.076603 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.080379 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qvd5m"] Feb 27 08:31:58 crc kubenswrapper[4906]: W0227 08:31:58.081276 4906 reflector.go:561] object-"openshift-route-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Feb 27 08:31:58 crc kubenswrapper[4906]: E0227 08:31:58.081785 4906 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 27 08:31:58 crc kubenswrapper[4906]: W0227 08:31:58.081424 4906 reflector.go:561] object-"openshift-route-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Feb 27 08:31:58 crc kubenswrapper[4906]: E0227 08:31:58.081821 4906 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.082137 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.082537 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.082681 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.082888 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.082972 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.083492 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.083505 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.084778 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6lqtc"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.085606 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.085722 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.085992 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-925p5"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.086656 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.086929 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xl55p"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.087812 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.088534 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.089442 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dq899"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.090069 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.090617 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.092144 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.092239 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.092347 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.092420 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r4ppj"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.092468 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.092505 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.092827 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.093090 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.097512 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.098593 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.099185 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.099240 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.099469 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.108929 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.110513 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.113676 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-knttz"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.114502 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.115318 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.123993 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.124322 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.136392 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.138509 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.139065 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lz5rn"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.139491 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.139605 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.140936 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.148751 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-xx8qm"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.149741 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.149958 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.150375 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.151157 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.151171 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-hkv6l"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.151792 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.153246 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.153499 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.153635 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.153757 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.157890 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.158057 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.158164 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.158181 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.158646 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.158747 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.158830 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.158989 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159219 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159340 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159453 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159563 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159676 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159796 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159844 4906 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-console"/"console-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.159933 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160249 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160424 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160550 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160620 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160654 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160674 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160775 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160788 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160558 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160902 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.160958 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.161077 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.161269 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.161506 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.161632 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.161717 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.161753 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.161946 4906 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.162129 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.162263 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.162410 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.162548 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.162640 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.162753 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.162909 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165440 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-client-ca\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165518 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-dir\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165542 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165585 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mh42q\" (UniqueName: \"kubernetes.io/projected/543292c6-d79b-4198-932b-fdc68dcbbde5-kube-api-access-mh42q\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165608 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165626 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165661 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hqd26\" (UniqueName: \"kubernetes.io/projected/78c06cb9-944c-4f1c-a705-a7264c1c87ff-kube-api-access-hqd26\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165681 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-config\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165699 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-config\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165716 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vcg2p\" (UniqueName: \"kubernetes.io/projected/4ceb7b65-ef46-4eb8-a129-d462e1989488-kube-api-access-vcg2p\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165732 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-etcd-client\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165749 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-encryption-config\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165768 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4px78\" 
(UniqueName: \"kubernetes.io/projected/390464d8-fc1d-443a-85f7-7164ac4e2d05-kube-api-access-4px78\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165788 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-serving-cert\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165803 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-audit-dir\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165821 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8e9a89a-0411-4456-9637-08712b004662-serving-cert\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165841 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntr47\" (UniqueName: \"kubernetes.io/projected/d8e9a89a-0411-4456-9637-08712b004662-kube-api-access-ntr47\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165856 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-config\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165872 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-client-ca\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165930 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ceb7b65-ef46-4eb8-a129-d462e1989488-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165946 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165961 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7pwpx\" (UniqueName: \"kubernetes.io/projected/fd02452f-1a87-43a5-8f32-3b63d9f522e3-kube-api-access-7pwpx\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.165984 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn8f9\" (UniqueName: \"kubernetes.io/projected/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-kube-api-access-pn8f9\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166005 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea522559-4f3c-4d90-ae46-aa2a9f27b243-config\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166024 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-audit\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166038 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/543292c6-d79b-4198-932b-fdc68dcbbde5-serving-cert\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166054 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166074 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09bfe857-0a67-4902-a9b9-5738a2074657-config\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166089 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config\") pod 
\"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166109 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6250048-cedf-4ca4-86f2-b0d45534d374-config\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166125 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78c06cb9-944c-4f1c-a705-a7264c1c87ff-serving-cert\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166143 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166159 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-etcd-client\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166174 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-ca\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166192 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bs75s\" (UniqueName: \"kubernetes.io/projected/ce2700f0-be98-4622-a729-99b696e7ecb9-kube-api-access-bs75s\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166210 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-image-import-ca\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166245 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-client\") pod \"etcd-operator-b45778765-knttz\" (UID: 
\"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166260 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fd02452f-1a87-43a5-8f32-3b63d9f522e3-node-pullsecrets\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166303 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtdcl\" (UniqueName: \"kubernetes.io/projected/09bfe857-0a67-4902-a9b9-5738a2074657-kube-api-access-jtdcl\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166323 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166341 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-policies\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166370 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-serving-cert\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166387 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-service-ca\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166402 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166417 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166436 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6250048-cedf-4ca4-86f2-b0d45534d374-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166455 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166482 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6bsr7\" (UniqueName: \"kubernetes.io/projected/177eae99-352f-45fe-9593-3e8ad8345bc6-kube-api-access-6bsr7\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166499 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ceb7b65-ef46-4eb8-a129-d462e1989488-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166517 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-etcd-serving-ca\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166531 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-encryption-config\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166550 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zb5l\" (UniqueName: \"kubernetes.io/projected/16f8b49f-8dc0-4aa5-bb80-9642e2688141-kube-api-access-9zb5l\") pod \"dns-operator-744455d44c-r4ppj\" (UID: \"16f8b49f-8dc0-4aa5-bb80-9642e2688141\") " pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166570 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/177eae99-352f-45fe-9593-3e8ad8345bc6-serving-cert\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166588 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166606 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166633 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbqjw\" (UniqueName: \"kubernetes.io/projected/d6250048-cedf-4ca4-86f2-b0d45534d374-kube-api-access-nbqjw\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166700 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce2700f0-be98-4622-a729-99b696e7ecb9-config\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166905 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ce294532-f4f2-46cf-b046-e81aa81f0f0c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-r8pwh\" (UID: \"ce294532-f4f2-46cf-b046-e81aa81f0f0c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166946 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09bfe857-0a67-4902-a9b9-5738a2074657-serving-cert\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.166972 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ea522559-4f3c-4d90-ae46-aa2a9f27b243-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167009 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/543292c6-d79b-4198-932b-fdc68dcbbde5-available-featuregates\") pod 
\"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167040 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16f8b49f-8dc0-4aa5-bb80-9642e2688141-metrics-tls\") pod \"dns-operator-744455d44c-r4ppj\" (UID: \"16f8b49f-8dc0-4aa5-bb80-9642e2688141\") " pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167078 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-service-ca-bundle\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167110 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slgzs\" (UniqueName: \"kubernetes.io/projected/ce294532-f4f2-46cf-b046-e81aa81f0f0c-kube-api-access-slgzs\") pod \"cluster-samples-operator-665b6dd947-r8pwh\" (UID: \"ce294532-f4f2-46cf-b046-e81aa81f0f0c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167133 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ea522559-4f3c-4d90-ae46-aa2a9f27b243-images\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167173 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/390464d8-fc1d-443a-85f7-7164ac4e2d05-serving-cert\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167196 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167258 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tx2h\" (UniqueName: \"kubernetes.io/projected/ea522559-4f3c-4d90-ae46-aa2a9f27b243-kube-api-access-8tx2h\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167289 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167311 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hn86w\" (UniqueName: \"kubernetes.io/projected/0f8b1512-2590-418e-8504-70ef3c1567b0-kube-api-access-hn86w\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167345 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce2700f0-be98-4622-a729-99b696e7ecb9-auth-proxy-config\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167382 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-config\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167412 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-audit-policies\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167433 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167458 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09bfe857-0a67-4902-a9b9-5738a2074657-trusted-ca\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167488 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167510 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fd02452f-1a87-43a5-8f32-3b63d9f522e3-audit-dir\") pod 
\"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.167534 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ce2700f0-be98-4622-a729-99b696e7ecb9-machine-approver-tls\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168034 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168255 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168345 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168432 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168454 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168513 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168620 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.168633 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.169391 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.169626 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.169686 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.169754 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.169846 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.169873 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.169982 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.170018 4906 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.170334 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.171118 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.171437 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.171610 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.173424 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.174387 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-km47t"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.174981 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.176615 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.177598 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.179102 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.208763 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zl4pc"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.213811 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.216831 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.242509 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.243195 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.243855 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.244341 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.246387 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.247410 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.248811 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.249011 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.249033 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.249871 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.250609 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.250671 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.251058 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.251193 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.249917 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.256028 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pqnpm"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.256965 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.257193 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.260151 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-bm27t"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.260980 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.263138 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.264904 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.266101 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.266943 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.267645 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268125 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16f8b49f-8dc0-4aa5-bb80-9642e2688141-metrics-tls\") pod \"dns-operator-744455d44c-r4ppj\" (UID: \"16f8b49f-8dc0-4aa5-bb80-9642e2688141\") " pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268165 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-service-ca-bundle\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268186 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slgzs\" (UniqueName: \"kubernetes.io/projected/ce294532-f4f2-46cf-b046-e81aa81f0f0c-kube-api-access-slgzs\") pod \"cluster-samples-operator-665b6dd947-r8pwh\" (UID: \"ce294532-f4f2-46cf-b046-e81aa81f0f0c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268203 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ea522559-4f3c-4d90-ae46-aa2a9f27b243-images\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268221 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268242 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/390464d8-fc1d-443a-85f7-7164ac4e2d05-serving-cert\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268260 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tx2h\" (UniqueName: \"kubernetes.io/projected/ea522559-4f3c-4d90-ae46-aa2a9f27b243-kube-api-access-8tx2h\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268274 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268289 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hn86w\" (UniqueName: \"kubernetes.io/projected/0f8b1512-2590-418e-8504-70ef3c1567b0-kube-api-access-hn86w\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268305 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce2700f0-be98-4622-a729-99b696e7ecb9-auth-proxy-config\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268322 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-config\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fd02452f-1a87-43a5-8f32-3b63d9f522e3-audit-dir\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268356 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-audit-policies\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268372 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268389 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/09bfe857-0a67-4902-a9b9-5738a2074657-trusted-ca\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268405 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268423 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ce2700f0-be98-4622-a729-99b696e7ecb9-machine-approver-tls\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268439 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268468 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-client-ca\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268484 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-dir\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268501 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mh42q\" (UniqueName: \"kubernetes.io/projected/543292c6-d79b-4198-932b-fdc68dcbbde5-kube-api-access-mh42q\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268520 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268540 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-config\") 
pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268557 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268575 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hqd26\" (UniqueName: \"kubernetes.io/projected/78c06cb9-944c-4f1c-a705-a7264c1c87ff-kube-api-access-hqd26\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268593 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-config\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268611 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vcg2p\" (UniqueName: \"kubernetes.io/projected/4ceb7b65-ef46-4eb8-a129-d462e1989488-kube-api-access-vcg2p\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268628 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-etcd-client\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268647 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-encryption-config\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268667 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4px78\" (UniqueName: \"kubernetes.io/projected/390464d8-fc1d-443a-85f7-7164ac4e2d05-kube-api-access-4px78\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268686 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8e9a89a-0411-4456-9637-08712b004662-serving-cert\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268705 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-serving-cert\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268722 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-audit-dir\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268742 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-client-ca\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268760 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntr47\" (UniqueName: \"kubernetes.io/projected/d8e9a89a-0411-4456-9637-08712b004662-kube-api-access-ntr47\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268776 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-config\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268793 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ceb7b65-ef46-4eb8-a129-d462e1989488-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268811 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268827 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7pwpx\" (UniqueName: \"kubernetes.io/projected/fd02452f-1a87-43a5-8f32-3b63d9f522e3-kube-api-access-7pwpx\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268842 4906 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/543292c6-d79b-4198-932b-fdc68dcbbde5-serving-cert\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268858 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn8f9\" (UniqueName: \"kubernetes.io/projected/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-kube-api-access-pn8f9\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268896 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea522559-4f3c-4d90-ae46-aa2a9f27b243-config\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268917 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-audit\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268933 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268951 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268974 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09bfe857-0a67-4902-a9b9-5738a2074657-config\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.268994 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6250048-cedf-4ca4-86f2-b0d45534d374-config\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269010 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78c06cb9-944c-4f1c-a705-a7264c1c87ff-serving-cert\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269026 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269041 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-etcd-client\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269061 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-ca\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269078 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bs75s\" (UniqueName: \"kubernetes.io/projected/ce2700f0-be98-4622-a729-99b696e7ecb9-kube-api-access-bs75s\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269093 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-image-import-ca\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269125 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-client\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269141 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fd02452f-1a87-43a5-8f32-3b63d9f522e3-node-pullsecrets\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269159 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtdcl\" (UniqueName: \"kubernetes.io/projected/09bfe857-0a67-4902-a9b9-5738a2074657-kube-api-access-jtdcl\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269179 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269199 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-policies\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269217 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-serving-cert\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269225 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269300 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.270329 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-service-ca-bundle\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.271069 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q826k"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.271233 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ea522559-4f3c-4d90-ae46-aa2a9f27b243-images\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.271906 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.272151 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.273509 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.273706 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.269237 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-service-ca\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.273857 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.273901 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.273937 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.273965 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6250048-cedf-4ca4-86f2-b0d45534d374-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274004 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6bsr7\" (UniqueName: \"kubernetes.io/projected/177eae99-352f-45fe-9593-3e8ad8345bc6-kube-api-access-6bsr7\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274024 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ceb7b65-ef46-4eb8-a129-d462e1989488-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274057 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-etcd-serving-ca\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274076 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-encryption-config\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274100 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zb5l\" (UniqueName: \"kubernetes.io/projected/16f8b49f-8dc0-4aa5-bb80-9642e2688141-kube-api-access-9zb5l\") pod \"dns-operator-744455d44c-r4ppj\" (UID: \"16f8b49f-8dc0-4aa5-bb80-9642e2688141\") " pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274118 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/177eae99-352f-45fe-9593-3e8ad8345bc6-serving-cert\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274136 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274161 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce2700f0-be98-4622-a729-99b696e7ecb9-config\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274180 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274214 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbqjw\" (UniqueName: \"kubernetes.io/projected/d6250048-cedf-4ca4-86f2-b0d45534d374-kube-api-access-nbqjw\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274235 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ce294532-f4f2-46cf-b046-e81aa81f0f0c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-r8pwh\" (UID: \"ce294532-f4f2-46cf-b046-e81aa81f0f0c\") " 
pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274253 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09bfe857-0a67-4902-a9b9-5738a2074657-serving-cert\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274485 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.274273 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ea522559-4f3c-4d90-ae46-aa2a9f27b243-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.276093 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.276134 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/543292c6-d79b-4198-932b-fdc68dcbbde5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.275987 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-client-ca\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.275972 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.277247 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce2700f0-be98-4622-a729-99b696e7ecb9-auth-proxy-config\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.277337 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d6250048-cedf-4ca4-86f2-b0d45534d374-config\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.277921 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.277924 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.278771 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/16f8b49f-8dc0-4aa5-bb80-9642e2688141-metrics-tls\") pod \"dns-operator-744455d44c-r4ppj\" (UID: \"16f8b49f-8dc0-4aa5-bb80-9642e2688141\") " pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.280772 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-etcd-serving-ca\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.275802 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.281937 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce2700f0-be98-4622-a729-99b696e7ecb9-config\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.282872 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-config\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.283091 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.285000 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/543292c6-d79b-4198-932b-fdc68dcbbde5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.285380 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-config\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.285789 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.286112 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4ceb7b65-ef46-4eb8-a129-d462e1989488-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.286187 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d6250048-cedf-4ca4-86f2-b0d45534d374-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.286461 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.286598 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.286827 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.286981 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/ce294532-f4f2-46cf-b046-e81aa81f0f0c-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-r8pwh\" (UID: \"ce294532-f4f2-46cf-b046-e81aa81f0f0c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.287063 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/390464d8-fc1d-443a-85f7-7164ac4e2d05-serving-cert\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.287666 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-audit-policies\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.287727 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/fd02452f-1a87-43a5-8f32-3b63d9f522e3-audit-dir\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.287912 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-config\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.288340 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.288383 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.288563 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/09bfe857-0a67-4902-a9b9-5738a2074657-trusted-ca\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.288787 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-image-import-ca\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.289477 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-ca\") pod 
\"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.290373 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.290804 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ea522559-4f3c-4d90-ae46-aa2a9f27b243-config\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.290975 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-encryption-config\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.291231 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/ea522559-4f3c-4d90-ae46-aa2a9f27b243-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.292008 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/fd02452f-1a87-43a5-8f32-3b63d9f522e3-node-pullsecrets\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.292128 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.292926 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09bfe857-0a67-4902-a9b9-5738a2074657-config\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.292947 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/177eae99-352f-45fe-9593-3e8ad8345bc6-serving-cert\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.293400 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-policies\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.293524 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-config\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.293701 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-trusted-ca-bundle\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.294205 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ceb7b65-ef46-4eb8-a129-d462e1989488-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.294417 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dn92s"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.294449 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-audit-dir\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.294934 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78c06cb9-944c-4f1c-a705-a7264c1c87ff-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.295198 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.295215 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536350-85r2j"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.295612 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-encryption-config\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.295958 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536350-85r2j" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.296007 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-client-ca\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.296490 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.296605 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-dir\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.297118 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/fd02452f-1a87-43a5-8f32-3b63d9f522e3-audit\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.297399 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.297820 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-925p5"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.299104 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.299905 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/ce2700f0-be98-4622-a729-99b696e7ecb9-machine-approver-tls\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.299934 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.300294 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"etcd-client\" (UniqueName: \"kubernetes.io/secret/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-client\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.300396 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-serving-cert\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.301746 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/fd02452f-1a87-43a5-8f32-3b63d9f522e3-etcd-client\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.302055 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.302296 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-serving-cert\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.303437 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.305232 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78c06cb9-944c-4f1c-a705-a7264c1c87ff-serving-cert\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.306106 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-b6rvf"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.306678 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8e9a89a-0411-4456-9637-08712b004662-serving-cert\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.306955 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.307137 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.307421 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.308106 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.310299 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xl55p"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.314084 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09bfe857-0a67-4902-a9b9-5738a2074657-serving-cert\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.314146 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.314173 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r4ppj"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.320414 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-etcd-client\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.320771 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.318676 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/543292c6-d79b-4198-932b-fdc68dcbbde5-serving-cert\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.324992 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-nd8mn"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.328036 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6lqtc"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.328112 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.328293 4906 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.334432 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.337827 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.339371 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.340615 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/177eae99-352f-45fe-9593-3e8ad8345bc6-etcd-service-ca\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.342308 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.343777 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-hkv6l"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.347265 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-hhgws"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.348545 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hhgws" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.350316 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vkqzc"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.351475 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.353946 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.354314 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.355664 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dn92s"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.355870 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.358914 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lz5rn"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.358957 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dq899"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.359979 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qvd5m"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.362040 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.363621 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-knttz"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.364354 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xx8qm"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.365836 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.366215 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.367352 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.368500 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.370198 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hhgws"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.370957 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pqnpm"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.373667 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.375386 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.376574 4906 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.377734 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.379072 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q826k"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.380096 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.381062 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.382462 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-km47t"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.383647 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.384755 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-b6rvf"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.386202 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536350-85r2j"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.386420 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.387110 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vkqzc"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.388652 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rzl5p"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.390412 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rzl5p"] Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.390628 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.407704 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.426903 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.466456 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.487503 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.525244 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.532996 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.549548 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.566662 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.586858 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.607425 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.626378 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.647128 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.666633 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.686389 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.727621 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.747233 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.766993 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.786663 4906 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.806743 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.828100 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.847152 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.866621 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.887074 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.907267 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.927608 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.948064 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.967256 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 27 08:31:58 crc kubenswrapper[4906]: I0227 08:31:58.995671 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.006347 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.027337 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.046689 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.067319 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.087769 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.107150 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.127628 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.147041 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 27 08:31:59 crc 
kubenswrapper[4906]: I0227 08:31:59.167152 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.189823 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.208272 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.227564 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.247309 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.268391 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.284454 4906 request.go:700] Waited for 1.016450375s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-scheduler-operator/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.288703 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: E0227 08:31:59.291871 4906 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition Feb 27 08:31:59 crc kubenswrapper[4906]: E0227 08:31:59.292059 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config podName:d8e9a89a-0411-4456-9637-08712b004662 nodeName:}" failed. No retries permitted until 2026-02-27 08:31:59.792034781 +0000 UTC m=+218.186436391 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config") pod "route-controller-manager-6576b87f9c-b7qtg" (UID: "d8e9a89a-0411-4456-9637-08712b004662") : failed to sync configmap cache: timed out waiting for the condition Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.307349 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.327458 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.362541 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slgzs\" (UniqueName: \"kubernetes.io/projected/ce294532-f4f2-46cf-b046-e81aa81f0f0c-kube-api-access-slgzs\") pod \"cluster-samples-operator-665b6dd947-r8pwh\" (UID: \"ce294532-f4f2-46cf-b046-e81aa81f0f0c\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.387599 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.389936 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tx2h\" (UniqueName: \"kubernetes.io/projected/ea522559-4f3c-4d90-ae46-aa2a9f27b243-kube-api-access-8tx2h\") pod \"machine-api-operator-5694c8668f-6lqtc\" (UID: \"ea522559-4f3c-4d90-ae46-aa2a9f27b243\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.392932 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.406659 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.447012 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.448294 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6bsr7\" (UniqueName: \"kubernetes.io/projected/177eae99-352f-45fe-9593-3e8ad8345bc6-kube-api-access-6bsr7\") pod \"etcd-operator-b45778765-knttz\" (UID: \"177eae99-352f-45fe-9593-3e8ad8345bc6\") " pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.466488 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.503156 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mh42q\" (UniqueName: \"kubernetes.io/projected/543292c6-d79b-4198-932b-fdc68dcbbde5-kube-api-access-mh42q\") pod \"openshift-config-operator-7777fb866f-xl55p\" (UID: \"543292c6-d79b-4198-932b-fdc68dcbbde5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.526691 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.529691 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hn86w\" (UniqueName: \"kubernetes.io/projected/0f8b1512-2590-418e-8504-70ef3c1567b0-kube-api-access-hn86w\") pod \"oauth-openshift-558db77b4-lz5rn\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.546405 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.567601 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.568753 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.586669 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.590300 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.603022 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.606067 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.626959 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.632521 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6lqtc"] Feb 27 08:31:59 crc kubenswrapper[4906]: W0227 08:31:59.641045 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podea522559_4f3c_4d90_ae46_aa2a9f27b243.slice/crio-bc96361c1ad8e5deb45529bc1b6ebed4166986f3ab96ccc60d0c93d4c3259934 WatchSource:0}: Error finding container bc96361c1ad8e5deb45529bc1b6ebed4166986f3ab96ccc60d0c93d4c3259934: Status 404 returned error can't find the container with id bc96361c1ad8e5deb45529bc1b6ebed4166986f3ab96ccc60d0c93d4c3259934 Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.661648 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbqjw\" (UniqueName: \"kubernetes.io/projected/d6250048-cedf-4ca4-86f2-b0d45534d374-kube-api-access-nbqjw\") pod \"openshift-apiserver-operator-796bbdcf4f-l9pjw\" (UID: \"d6250048-cedf-4ca4-86f2-b0d45534d374\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.674632 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.709541 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zb5l\" (UniqueName: \"kubernetes.io/projected/16f8b49f-8dc0-4aa5-bb80-9642e2688141-kube-api-access-9zb5l\") pod \"dns-operator-744455d44c-r4ppj\" (UID: \"16f8b49f-8dc0-4aa5-bb80-9642e2688141\") " pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.722489 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7pwpx\" (UniqueName: \"kubernetes.io/projected/fd02452f-1a87-43a5-8f32-3b63d9f522e3-kube-api-access-7pwpx\") pod \"apiserver-76f77b778f-zl4pc\" (UID: \"fd02452f-1a87-43a5-8f32-3b63d9f522e3\") " pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.728809 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.760341 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.763022 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bs75s\" (UniqueName: \"kubernetes.io/projected/ce2700f0-be98-4622-a729-99b696e7ecb9-kube-api-access-bs75s\") pod \"machine-approver-56656f9798-pb9n9\" (UID: \"ce2700f0-be98-4622-a729-99b696e7ecb9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.763841 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh"] Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.780430 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn8f9\" (UniqueName: \"kubernetes.io/projected/8a68474e-6d7c-4b29-89e0-90d25dbbf86e-kube-api-access-pn8f9\") pod \"apiserver-7bbb656c7d-gbqf7\" (UID: \"8a68474e-6d7c-4b29-89e0-90d25dbbf86e\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.785656 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.797266 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.806973 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.817227 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" event={"ID":"ea522559-4f3c-4d90-ae46-aa2a9f27b243","Type":"ContainerStarted","Data":"bc96361c1ad8e5deb45529bc1b6ebed4166986f3ab96ccc60d0c93d4c3259934"} Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.820272 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lz5rn"] Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.825712 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 27 08:31:59 crc kubenswrapper[4906]: W0227 08:31:59.842930 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0f8b1512_2590_418e_8504_70ef3c1567b0.slice/crio-fb57066c6a21ca243fe0a91e97560b10bf46391dd734f025e4cfe0615a8311a1 WatchSource:0}: Error finding container fb57066c6a21ca243fe0a91e97560b10bf46391dd734f025e4cfe0615a8311a1: Status 404 returned error can't find the container with id fb57066c6a21ca243fe0a91e97560b10bf46391dd734f025e4cfe0615a8311a1 Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.849699 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.849782 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.865428 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-knttz"] Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.878550 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.881095 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtdcl\" (UniqueName: \"kubernetes.io/projected/09bfe857-0a67-4902-a9b9-5738a2074657-kube-api-access-jtdcl\") pod \"console-operator-58897d9998-dq899\" (UID: \"09bfe857-0a67-4902-a9b9-5738a2074657\") " pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.881386 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.908657 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hqd26\" (UniqueName: \"kubernetes.io/projected/78c06cb9-944c-4f1c-a705-a7264c1c87ff-kube-api-access-hqd26\") pod \"authentication-operator-69f744f599-925p5\" (UID: \"78c06cb9-944c-4f1c-a705-a7264c1c87ff\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.918586 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw"] Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.922233 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4px78\" (UniqueName: \"kubernetes.io/projected/390464d8-fc1d-443a-85f7-7164ac4e2d05-kube-api-access-4px78\") pod \"controller-manager-879f6c89f-qvd5m\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.945659 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.950380 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vcg2p\" (UniqueName: \"kubernetes.io/projected/4ceb7b65-ef46-4eb8-a129-d462e1989488-kube-api-access-vcg2p\") pod \"openshift-controller-manager-operator-756b6f6bc6-tns4l\" (UID: \"4ceb7b65-ef46-4eb8-a129-d462e1989488\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.965025 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.965566 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.966617 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.981267 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-xl55p"] Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.986917 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 27 08:31:59 crc kubenswrapper[4906]: I0227 08:31:59.994544 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" Feb 27 08:32:00 crc kubenswrapper[4906]: W0227 08:32:00.000132 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod543292c6_d79b_4198_932b_fdc68dcbbde5.slice/crio-de3014de5fc4253d4eb3e1f17b596bac8689d76df5a345b37e9c01bc1b0c87e4 WatchSource:0}: Error finding container de3014de5fc4253d4eb3e1f17b596bac8689d76df5a345b37e9c01bc1b0c87e4: Status 404 returned error can't find the container with id de3014de5fc4253d4eb3e1f17b596bac8689d76df5a345b37e9c01bc1b0c87e4 Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.007625 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.032358 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.041724 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.047715 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.068831 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.075828 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.090279 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.111653 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 27 08:32:00 crc kubenswrapper[4906]: W0227 08:32:00.118583 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a68474e_6d7c_4b29_89e0_90d25dbbf86e.slice/crio-fb6f0ac4289b3dc5d7ce59678afe6cc960be3efb203b4c3ffc5c4ab1f9170ddf WatchSource:0}: Error finding container fb6f0ac4289b3dc5d7ce59678afe6cc960be3efb203b4c3ffc5c4ab1f9170ddf: Status 404 returned error can't find the container with id fb6f0ac4289b3dc5d7ce59678afe6cc960be3efb203b4c3ffc5c4ab1f9170ddf Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.125797 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.138493 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536352-8fqhc"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.139648 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.146818 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.147079 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536352-8fqhc"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.163177 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.167144 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.189950 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.193641 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-zl4pc"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.207467 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.228317 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.246825 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.268862 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.286338 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.304296 4906 request.go:700] Waited for 1.955248905s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/configmaps?fieldSelector=metadata.name%3Ddns-default&limit=500&resourceVersion=0 Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.306199 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.329589 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.348532 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.362019 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-r4ppj"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.372613 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: W0227 08:32:00.385787 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod16f8b49f_8dc0_4aa5_bb80_9642e2688141.slice/crio-85681be5202b3c2fd273a7be742c8372b6f5d73309e92a30bfb7599c5e4abef7 WatchSource:0}: Error finding container 85681be5202b3c2fd273a7be742c8372b6f5d73309e92a30bfb7599c5e4abef7: Status 404 returned error can't find the container with id 85681be5202b3c2fd273a7be742c8372b6f5d73309e92a30bfb7599c5e4abef7 Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.391361 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 27 
08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.413144 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.433117 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-925p5"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.440061 4906 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.443218 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qvd5m"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.449968 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.504818 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505224 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zs5qc\" (UniqueName: \"kubernetes.io/projected/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-kube-api-access-zs5qc\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505245 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-service-ca\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505284 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jll25\" (UniqueName: \"kubernetes.io/projected/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-kube-api-access-jll25\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505316 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gnm6\" (UniqueName: \"kubernetes.io/projected/a861f5dc-100c-443f-ab72-ecfe71895998-kube-api-access-5gnm6\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505397 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7a26f83-d59b-4375-bcb0-89b52426dae7-ca-trust-extracted\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505469 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-trusted-ca\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505502 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-serving-cert\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505517 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef6057ac-2815-4543-ba7c-ae864cc3a830-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505552 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-console-config\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505586 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c92m5\" (UniqueName: \"kubernetes.io/projected/f3cc181a-c108-493a-87fa-9bf76f81b062-kube-api-access-c92m5\") pod \"downloads-7954f5f757-hkv6l\" (UID: \"f3cc181a-c108-493a-87fa-9bf76f81b062\") " pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505601 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505671 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505721 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-trusted-ca\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 
08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505736 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-oauth-serving-cert\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505789 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-metrics-tls\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505805 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505858 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef6057ac-2815-4543-ba7c-ae864cc3a830-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505943 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-certificates\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.505987 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-bound-sa-token\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506035 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-oauth-config\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506049 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-trusted-ca-bundle\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506301 4906 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef6057ac-2815-4543-ba7c-ae864cc3a830-config\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506317 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506353 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5pwc\" (UniqueName: \"kubernetes.io/projected/05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d-kube-api-access-h5pwc\") pod \"migrator-59844c95c7-79trw\" (UID: \"05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506387 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7a26f83-d59b-4375-bcb0-89b52426dae7-installation-pull-secrets\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506421 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmxfn\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-kube-api-access-pmxfn\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.506448 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-tls\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.507176 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.508709 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:32:00 crc kubenswrapper[4906]: E0227 08:32:00.512936 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.012917413 +0000 UTC m=+219.407319023 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.527416 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.543127 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntr47\" (UniqueName: \"kubernetes.io/projected/d8e9a89a-0411-4456-9637-08712b004662-kube-api-access-ntr47\") pod \"route-controller-manager-6576b87f9c-b7qtg\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.596849 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l"] Feb 27 08:32:00 crc kubenswrapper[4906]: W0227 08:32:00.603365 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod390464d8_fc1d_443a_85f7_7164ac4e2d05.slice/crio-fc2dfcd733248be924a807b6e66df1a8b04691bab48b4dab5e2d284d20c14abf WatchSource:0}: Error finding container fc2dfcd733248be924a807b6e66df1a8b04691bab48b4dab5e2d284d20c14abf: Status 404 returned error can't find the container with id fc2dfcd733248be924a807b6e66df1a8b04691bab48b4dab5e2d284d20c14abf Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.607671 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:00 crc kubenswrapper[4906]: E0227 08:32:00.607834 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.107792799 +0000 UTC m=+219.502194419 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.607958 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-trusted-ca\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608018 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-oauth-serving-cert\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608052 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9wgc\" (UniqueName: \"kubernetes.io/projected/f743212c-ed63-408e-8063-ed04c8a7a1a9-kube-api-access-x9wgc\") pod \"auto-csr-approver-29536350-85r2j\" (UID: \"f743212c-ed63-408e-8063-ed04c8a7a1a9\") " pod="openshift-infra/auto-csr-approver-29536350-85r2j" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608083 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-webhook-cert\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608110 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608134 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-tmpfs\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608159 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1d09410-c54c-478a-a350-58f3310099af-certs\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608186 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"webhook-certs\" (UniqueName: \"kubernetes.io/secret/75b3451d-2451-4a02-a510-fa27d6ea841a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q826k\" (UID: \"75b3451d-2451-4a02-a510-fa27d6ea841a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608208 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rl9nk\" (UniqueName: \"kubernetes.io/projected/db9e745a-c371-4b93-91ba-f755f3d4929d-kube-api-access-rl9nk\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608239 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6357283f-dd75-4101-8a3c-24fd81f0a991-serving-cert\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608272 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x6nx\" (UniqueName: \"kubernetes.io/projected/ce4d5219-eab9-4fda-bf96-5a7ef9056300-kube-api-access-2x6nx\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608294 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-signing-cabundle\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608317 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbtgj\" (UniqueName: \"kubernetes.io/projected/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-kube-api-access-tbtgj\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608346 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-bound-sa-token\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608374 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m89v8\" (UniqueName: \"kubernetes.io/projected/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-kube-api-access-m89v8\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608395 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-trusted-ca-bundle\") pod 
\"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608416 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtblc\" (UniqueName: \"kubernetes.io/projected/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-kube-api-access-rtblc\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608433 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6357283f-dd75-4101-8a3c-24fd81f0a991-config\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608454 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef6057ac-2815-4543-ba7c-ae864cc3a830-config\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608471 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608493 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5pwc\" (UniqueName: \"kubernetes.io/projected/05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d-kube-api-access-h5pwc\") pod \"migrator-59844c95c7-79trw\" (UID: \"05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608513 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zhhwt\" (UniqueName: \"kubernetes.io/projected/582fc06a-0d1d-4260-a91f-af317ab278d9-kube-api-access-zhhwt\") pod \"auto-csr-approver-29536352-8fqhc\" (UID: \"582fc06a-0d1d-4260-a91f-af317ab278d9\") " pod="openshift-infra/auto-csr-approver-29536352-8fqhc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608529 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-signing-key\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608557 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-tls\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608589 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7a26f83-d59b-4375-bcb0-89b52426dae7-installation-pull-secrets\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608613 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmxfn\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-kube-api-access-pmxfn\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608636 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-images\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608654 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-csi-data-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608674 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d217f1ee-f917-4ac1-bf8d-c8a011d42ebc-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-p78ck\" (UID: \"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608695 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zs5qc\" (UniqueName: \"kubernetes.io/projected/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-kube-api-access-zs5qc\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608714 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-service-ca\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608732 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jll25\" (UniqueName: \"kubernetes.io/projected/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-kube-api-access-jll25\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 
crc kubenswrapper[4906]: I0227 08:32:00.608759 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffd1a145-703e-452a-b666-41c9c837985d-cert\") pod \"ingress-canary-vkqzc\" (UID: \"ffd1a145-703e-452a-b666-41c9c837985d\") " pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608782 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r9z98\" (UniqueName: \"kubernetes.io/projected/739fb53a-c353-4113-9f21-062b6580a184-kube-api-access-r9z98\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608804 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-metrics-certs\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608833 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ce4d5219-eab9-4fda-bf96-5a7ef9056300-config-volume\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608854 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/917d272c-e31b-420f-9408-06f7130b76ba-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608901 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkntp\" (UniqueName: \"kubernetes.io/projected/75b3451d-2451-4a02-a510-fa27d6ea841a-kube-api-access-zkntp\") pod \"multus-admission-controller-857f4d67dd-q826k\" (UID: \"75b3451d-2451-4a02-a510-fa27d6ea841a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608932 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-serving-cert\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608954 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74d50313-482b-4d26-acd9-ce88a2c53093-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.608975 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-registration-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609016 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-trusted-ca\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609039 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88g4m\" (UniqueName: \"kubernetes.io/projected/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-kube-api-access-88g4m\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609060 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-stats-auth\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609080 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74d50313-482b-4d26-acd9-ce88a2c53093-proxy-tls\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609096 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ce4d5219-eab9-4fda-bf96-5a7ef9056300-metrics-tls\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609113 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhvpx\" (UniqueName: \"kubernetes.io/projected/d217f1ee-f917-4ac1-bf8d-c8a011d42ebc-kube-api-access-fhvpx\") pod \"control-plane-machine-set-operator-78cbb6b69f-p78ck\" (UID: \"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609129 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac646565-cd5f-405a-ad92-3f2afb51d5c8-secret-volume\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609153 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c92m5\" (UniqueName: 
\"kubernetes.io/projected/f3cc181a-c108-493a-87fa-9bf76f81b062-kube-api-access-c92m5\") pod \"downloads-7954f5f757-hkv6l\" (UID: \"f3cc181a-c108-493a-87fa-9bf76f81b062\") " pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609174 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609200 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9vqm\" (UniqueName: \"kubernetes.io/projected/c1d09410-c54c-478a-a350-58f3310099af-kube-api-access-n9vqm\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609220 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/739fb53a-c353-4113-9f21-062b6580a184-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609248 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609269 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-plugins-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609287 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-config\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609307 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vksxh\" (UniqueName: \"kubernetes.io/projected/9c399ec5-479b-4e68-b556-691d6b9dd26f-kube-api-access-vksxh\") pod \"package-server-manager-789f6589d5-mmsmb\" (UID: \"9c399ec5-479b-4e68-b556-691d6b9dd26f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609330 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-metrics-tls\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609355 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac646565-cd5f-405a-ad92-3f2afb51d5c8-config-volume\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609393 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609415 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef6057ac-2815-4543-ba7c-ae864cc3a830-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609432 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8be167c5-1053-42f7-ae0a-11b4fc0d8333-srv-cert\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609451 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hl4cc\" (UniqueName: \"kubernetes.io/projected/dbd83c42-7c7f-4257-9105-58f2e2fae841-kube-api-access-hl4cc\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609469 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbd83c42-7c7f-4257-9105-58f2e2fae841-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609490 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-certificates\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609507 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbd83c42-7c7f-4257-9105-58f2e2fae841-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609526 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db9e745a-c371-4b93-91ba-f755f3d4929d-service-ca-bundle\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609546 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-oauth-config\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609566 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-mountpoint-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609587 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-proxy-tls\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609615 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-default-certificate\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609650 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkdqg\" (UniqueName: \"kubernetes.io/projected/ffd1a145-703e-452a-b666-41c9c837985d-kube-api-access-kkdqg\") pod \"ingress-canary-vkqzc\" (UID: \"ffd1a145-703e-452a-b666-41c9c837985d\") " pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609673 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5mct\" (UniqueName: \"kubernetes.io/projected/8be167c5-1053-42f7-ae0a-11b4fc0d8333-kube-api-access-q5mct\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609698 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/739fb53a-c353-4113-9f21-062b6580a184-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609744 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqvpj\" (UniqueName: \"kubernetes.io/projected/74d50313-482b-4d26-acd9-ce88a2c53093-kube-api-access-pqvpj\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609763 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-apiservice-cert\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609781 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609798 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8be167c5-1053-42f7-ae0a-11b4fc0d8333-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609813 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609837 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9c399ec5-479b-4e68-b556-691d6b9dd26f-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mmsmb\" (UID: \"9c399ec5-479b-4e68-b556-691d6b9dd26f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609857 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gnm6\" (UniqueName: \"kubernetes.io/projected/a861f5dc-100c-443f-ab72-ecfe71895998-kube-api-access-5gnm6\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609894 4906 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1d09410-c54c-478a-a350-58f3310099af-node-bootstrap-token\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609914 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-socket-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609932 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7a26f83-d59b-4375-bcb0-89b52426dae7-ca-trust-extracted\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609949 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/917d272c-e31b-420f-9408-06f7130b76ba-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609978 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef6057ac-2815-4543-ba7c-ae864cc3a830-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.609995 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-srv-cert\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.610014 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmd5p\" (UniqueName: \"kubernetes.io/projected/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-kube-api-access-rmd5p\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.610036 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-console-config\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.610055 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.610075 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/917d272c-e31b-420f-9408-06f7130b76ba-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.610091 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7vz4p\" (UniqueName: \"kubernetes.io/projected/6357283f-dd75-4101-8a3c-24fd81f0a991-kube-api-access-7vz4p\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.610153 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sgwc\" (UniqueName: \"kubernetes.io/projected/ac646565-cd5f-405a-ad92-3f2afb51d5c8-kube-api-access-9sgwc\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.610170 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-profile-collector-cert\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.611835 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-trusted-ca\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.612849 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-oauth-serving-cert\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: E0227 08:32:00.614514 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.11449181 +0000 UTC m=+219.508893420 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.615978 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef6057ac-2815-4543-ba7c-ae864cc3a830-config\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.616529 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-console-config\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.618488 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-service-ca\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.619280 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-trusted-ca\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.619426 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7a26f83-d59b-4375-bcb0-89b52426dae7-ca-trust-extracted\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.620398 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-certificates\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.620478 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-trusted-ca-bundle\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.621903 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-oauth-config\") pod \"console-f9d7485db-xx8qm\" (UID: 
\"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.624674 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7a26f83-d59b-4375-bcb0-89b52426dae7-installation-pull-secrets\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.625077 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-metrics-tls\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.626135 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-serving-cert\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.626601 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.630364 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.631217 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-tls\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.637416 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef6057ac-2815-4543-ba7c-ae864cc3a830-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.643128 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ef6057ac-2815-4543-ba7c-ae864cc3a830-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-2vdrh\" (UID: \"ef6057ac-2815-4543-ba7c-ae864cc3a830\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.661750 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-bound-sa-token\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.683963 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gnm6\" (UniqueName: \"kubernetes.io/projected/a861f5dc-100c-443f-ab72-ecfe71895998-kube-api-access-5gnm6\") pod \"console-f9d7485db-xx8qm\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.700810 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5pwc\" (UniqueName: \"kubernetes.io/projected/05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d-kube-api-access-h5pwc\") pod \"migrator-59844c95c7-79trw\" (UID: \"05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711411 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711699 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6357283f-dd75-4101-8a3c-24fd81f0a991-serving-cert\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711766 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x6nx\" (UniqueName: \"kubernetes.io/projected/ce4d5219-eab9-4fda-bf96-5a7ef9056300-kube-api-access-2x6nx\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711789 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-signing-cabundle\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711812 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbtgj\" (UniqueName: \"kubernetes.io/projected/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-kube-api-access-tbtgj\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711848 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m89v8\" (UniqueName: \"kubernetes.io/projected/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-kube-api-access-m89v8\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711894 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtblc\" (UniqueName: \"kubernetes.io/projected/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-kube-api-access-rtblc\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711927 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6357283f-dd75-4101-8a3c-24fd81f0a991-config\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711954 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-signing-key\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.711985 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zhhwt\" (UniqueName: \"kubernetes.io/projected/582fc06a-0d1d-4260-a91f-af317ab278d9-kube-api-access-zhhwt\") pod \"auto-csr-approver-29536352-8fqhc\" (UID: \"582fc06a-0d1d-4260-a91f-af317ab278d9\") " pod="openshift-infra/auto-csr-approver-29536352-8fqhc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712011 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-csi-data-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712036 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d217f1ee-f917-4ac1-bf8d-c8a011d42ebc-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-p78ck\" (UID: \"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712068 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-images\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712127 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffd1a145-703e-452a-b666-41c9c837985d-cert\") pod \"ingress-canary-vkqzc\" (UID: \"ffd1a145-703e-452a-b666-41c9c837985d\") " pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712153 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r9z98\" 
(UniqueName: \"kubernetes.io/projected/739fb53a-c353-4113-9f21-062b6580a184-kube-api-access-r9z98\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712176 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-metrics-certs\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712202 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ce4d5219-eab9-4fda-bf96-5a7ef9056300-config-volume\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712224 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/917d272c-e31b-420f-9408-06f7130b76ba-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712247 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkntp\" (UniqueName: \"kubernetes.io/projected/75b3451d-2451-4a02-a510-fa27d6ea841a-kube-api-access-zkntp\") pod \"multus-admission-controller-857f4d67dd-q826k\" (UID: \"75b3451d-2451-4a02-a510-fa27d6ea841a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712270 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74d50313-482b-4d26-acd9-ce88a2c53093-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712289 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-registration-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712323 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88g4m\" (UniqueName: \"kubernetes.io/projected/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-kube-api-access-88g4m\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712344 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-stats-auth\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " 
pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712367 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74d50313-482b-4d26-acd9-ce88a2c53093-proxy-tls\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712388 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ce4d5219-eab9-4fda-bf96-5a7ef9056300-metrics-tls\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712409 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhvpx\" (UniqueName: \"kubernetes.io/projected/d217f1ee-f917-4ac1-bf8d-c8a011d42ebc-kube-api-access-fhvpx\") pod \"control-plane-machine-set-operator-78cbb6b69f-p78ck\" (UID: \"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712445 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac646565-cd5f-405a-ad92-3f2afb51d5c8-secret-volume\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712469 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9vqm\" (UniqueName: \"kubernetes.io/projected/c1d09410-c54c-478a-a350-58f3310099af-kube-api-access-n9vqm\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712497 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/739fb53a-c353-4113-9f21-062b6580a184-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712531 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-plugins-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712554 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vksxh\" (UniqueName: \"kubernetes.io/projected/9c399ec5-479b-4e68-b556-691d6b9dd26f-kube-api-access-vksxh\") pod \"package-server-manager-789f6589d5-mmsmb\" (UID: \"9c399ec5-479b-4e68-b556-691d6b9dd26f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712579 4906 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac646565-cd5f-405a-ad92-3f2afb51d5c8-config-volume\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712602 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-config\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712637 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8be167c5-1053-42f7-ae0a-11b4fc0d8333-srv-cert\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712656 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712678 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hl4cc\" (UniqueName: \"kubernetes.io/projected/dbd83c42-7c7f-4257-9105-58f2e2fae841-kube-api-access-hl4cc\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712701 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbd83c42-7c7f-4257-9105-58f2e2fae841-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712723 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbd83c42-7c7f-4257-9105-58f2e2fae841-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712746 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-mountpoint-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712769 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/db9e745a-c371-4b93-91ba-f755f3d4929d-service-ca-bundle\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712813 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-proxy-tls\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712837 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-default-certificate\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712865 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkdqg\" (UniqueName: \"kubernetes.io/projected/ffd1a145-703e-452a-b666-41c9c837985d-kube-api-access-kkdqg\") pod \"ingress-canary-vkqzc\" (UID: \"ffd1a145-703e-452a-b666-41c9c837985d\") " pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712905 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5mct\" (UniqueName: \"kubernetes.io/projected/8be167c5-1053-42f7-ae0a-11b4fc0d8333-kube-api-access-q5mct\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712930 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/739fb53a-c353-4113-9f21-062b6580a184-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712968 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqvpj\" (UniqueName: \"kubernetes.io/projected/74d50313-482b-4d26-acd9-ce88a2c53093-kube-api-access-pqvpj\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.712994 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-apiservice-cert\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713019 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8be167c5-1053-42f7-ae0a-11b4fc0d8333-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: 
\"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713042 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713069 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9c399ec5-479b-4e68-b556-691d6b9dd26f-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mmsmb\" (UID: \"9c399ec5-479b-4e68-b556-691d6b9dd26f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713095 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-socket-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713113 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1d09410-c54c-478a-a350-58f3310099af-node-bootstrap-token\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713135 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/917d272c-e31b-420f-9408-06f7130b76ba-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: E0227 08:32:00.713212 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.213186514 +0000 UTC m=+219.607588134 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713243 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmd5p\" (UniqueName: \"kubernetes.io/projected/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-kube-api-access-rmd5p\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713266 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-srv-cert\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713291 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713316 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/917d272c-e31b-420f-9408-06f7130b76ba-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713339 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7vz4p\" (UniqueName: \"kubernetes.io/projected/6357283f-dd75-4101-8a3c-24fd81f0a991-kube-api-access-7vz4p\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713362 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-profile-collector-cert\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713387 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sgwc\" (UniqueName: \"kubernetes.io/projected/ac646565-cd5f-405a-ad92-3f2afb51d5c8-kube-api-access-9sgwc\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 
08:32:00.713421 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9wgc\" (UniqueName: \"kubernetes.io/projected/f743212c-ed63-408e-8063-ed04c8a7a1a9-kube-api-access-x9wgc\") pod \"auto-csr-approver-29536350-85r2j\" (UID: \"f743212c-ed63-408e-8063-ed04c8a7a1a9\") " pod="openshift-infra/auto-csr-approver-29536350-85r2j" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713444 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-webhook-cert\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713467 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-tmpfs\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713487 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1d09410-c54c-478a-a350-58f3310099af-certs\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713513 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/75b3451d-2451-4a02-a510-fa27d6ea841a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q826k\" (UID: \"75b3451d-2451-4a02-a510-fa27d6ea841a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713537 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rl9nk\" (UniqueName: \"kubernetes.io/projected/db9e745a-c371-4b93-91ba-f755f3d4929d-kube-api-access-rl9nk\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.713612 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-config\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.714911 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/db9e745a-c371-4b93-91ba-f755f3d4929d-service-ca-bundle\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.715441 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-plugins-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " 
pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.715952 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6357283f-dd75-4101-8a3c-24fd81f0a991-serving-cert\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.716715 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-images\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.717011 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/9c399ec5-479b-4e68-b556-691d6b9dd26f-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-mmsmb\" (UID: \"9c399ec5-479b-4e68-b556-691d6b9dd26f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.717117 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-socket-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.717317 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8be167c5-1053-42f7-ae0a-11b4fc0d8333-profile-collector-cert\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.717798 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8be167c5-1053-42f7-ae0a-11b4fc0d8333-srv-cert\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.718466 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.718623 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/739fb53a-c353-4113-9f21-062b6580a184-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.719478 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: 
\"kubernetes.io/empty-dir/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-tmpfs\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.719593 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-mountpoint-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.720067 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ffd1a145-703e-452a-b666-41c9c837985d-cert\") pod \"ingress-canary-vkqzc\" (UID: \"ffd1a145-703e-452a-b666-41c9c837985d\") " pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.720473 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/c1d09410-c54c-478a-a350-58f3310099af-node-bootstrap-token\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.720595 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-csi-data-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.721218 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-stats-auth\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.721227 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-proxy-tls\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.721649 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/dbd83c42-7c7f-4257-9105-58f2e2fae841-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.721709 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/dbd83c42-7c7f-4257-9105-58f2e2fae841-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.721980 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-bound-sa-token\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.722067 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac646565-cd5f-405a-ad92-3f2afb51d5c8-secret-volume\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.722322 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/739fb53a-c353-4113-9f21-062b6580a184-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.722362 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d217f1ee-f917-4ac1-bf8d-c8a011d42ebc-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-p78ck\" (UID: \"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.722407 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-registration-dir\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.723029 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-signing-cabundle\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.723389 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ce4d5219-eab9-4fda-bf96-5a7ef9056300-config-volume\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.723489 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/c1d09410-c54c-478a-a350-58f3310099af-certs\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.723529 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac646565-cd5f-405a-ad92-3f2afb51d5c8-config-volume\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.723553 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/74d50313-482b-4d26-acd9-ce88a2c53093-proxy-tls\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.724228 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-signing-key\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.724410 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-default-certificate\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.724742 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/74d50313-482b-4d26-acd9-ce88a2c53093-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.724950 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-auth-proxy-config\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.725868 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/75b3451d-2451-4a02-a510-fa27d6ea841a-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q826k\" (UID: \"75b3451d-2451-4a02-a510-fa27d6ea841a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.726663 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-srv-cert\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.729055 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ce4d5219-eab9-4fda-bf96-5a7ef9056300-metrics-tls\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.729779 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-profile-collector-cert\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.733784 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/db9e745a-c371-4b93-91ba-f755f3d4929d-metrics-certs\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.734103 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/917d272c-e31b-420f-9408-06f7130b76ba-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.734274 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/917d272c-e31b-420f-9408-06f7130b76ba-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.735378 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6357283f-dd75-4101-8a3c-24fd81f0a991-config\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.740017 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-apiservice-cert\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.740231 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-webhook-cert\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.742864 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmxfn\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-kube-api-access-pmxfn\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.767288 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jll25\" (UniqueName: \"kubernetes.io/projected/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-kube-api-access-jll25\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.784198 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-cr7bl\" (UID: \"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.796040 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.800577 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zs5qc\" (UniqueName: \"kubernetes.io/projected/84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1-kube-api-access-zs5qc\") pod \"ingress-operator-5b745b69d9-62r9p\" (UID: \"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.806491 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-dq899"] Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.811952 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.815330 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:00 crc kubenswrapper[4906]: E0227 08:32:00.815782 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.315769169 +0000 UTC m=+219.710170779 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.816204 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.824280 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c92m5\" (UniqueName: \"kubernetes.io/projected/f3cc181a-c108-493a-87fa-9bf76f81b062-kube-api-access-c92m5\") pod \"downloads-7954f5f757-hkv6l\" (UID: \"f3cc181a-c108-493a-87fa-9bf76f81b062\") " pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.825806 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" event={"ID":"ea522559-4f3c-4d90-ae46-aa2a9f27b243","Type":"ContainerStarted","Data":"eed8be60cab0fc939c00bb988b95873d87c496e8b39204599903f9d216574baa"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.825851 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" event={"ID":"ea522559-4f3c-4d90-ae46-aa2a9f27b243","Type":"ContainerStarted","Data":"47a3f4f82e58278688d48c360d95339e2850454bcc00785ecd5dcdac6e4747c4"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.827784 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" event={"ID":"4ceb7b65-ef46-4eb8-a129-d462e1989488","Type":"ContainerStarted","Data":"02f6d65505a9eaeaa06f8409fc99abca1497e79cf9bb5d83d35f6c6973716f46"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.833101 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.836486 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" event={"ID":"390464d8-fc1d-443a-85f7-7164ac4e2d05","Type":"ContainerStarted","Data":"fc2dfcd733248be924a807b6e66df1a8b04691bab48b4dab5e2d284d20c14abf"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.838459 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" event={"ID":"0f8b1512-2590-418e-8504-70ef3c1567b0","Type":"ContainerStarted","Data":"6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.838521 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" event={"ID":"0f8b1512-2590-418e-8504-70ef3c1567b0","Type":"ContainerStarted","Data":"fb57066c6a21ca243fe0a91e97560b10bf46391dd734f025e4cfe0615a8311a1"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.843249 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" event={"ID":"543292c6-d79b-4198-932b-fdc68dcbbde5","Type":"ContainerStarted","Data":"2e04c3c69fb4333a0ec47aff4e5250567c10e17397b37eda13b866522448bac5"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.843340 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" event={"ID":"543292c6-d79b-4198-932b-fdc68dcbbde5","Type":"ContainerStarted","Data":"de3014de5fc4253d4eb3e1f17b596bac8689d76df5a345b37e9c01bc1b0c87e4"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.845571 4906 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" event={"ID":"fd02452f-1a87-43a5-8f32-3b63d9f522e3","Type":"ContainerStarted","Data":"45d8cbbeb85b3c513ce25f7341a4e044e0a1f358a0142a8e68dd056b81313806"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.852355 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" event={"ID":"d6250048-cedf-4ca4-86f2-b0d45534d374","Type":"ContainerStarted","Data":"262d5aaa5ea264afd82b9a45eebda1d19cab7b9499151642fd49f011d137990f"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.852421 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" event={"ID":"d6250048-cedf-4ca4-86f2-b0d45534d374","Type":"ContainerStarted","Data":"e26d974e0434e7e1c77fc567fb04cf020b113956e91cc973a16319b28aea4efd"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.852945 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.854457 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" event={"ID":"ce294532-f4f2-46cf-b046-e81aa81f0f0c","Type":"ContainerStarted","Data":"09f4da0b828e3123cebcad2039eebb5eccbe5215d9de03191b16bb1027ae1730"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.856220 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" event={"ID":"177eae99-352f-45fe-9593-3e8ad8345bc6","Type":"ContainerStarted","Data":"758d6f034de8d51149bf38f940da22cec2f9ff6c25cebb26cd10420f5ce1757d"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.856245 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" event={"ID":"177eae99-352f-45fe-9593-3e8ad8345bc6","Type":"ContainerStarted","Data":"205c4248ace8e4f784d4ab1385e8dc4eaa2d89e809fd209749a8e2623a86ac06"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.857344 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" event={"ID":"ce2700f0-be98-4622-a729-99b696e7ecb9","Type":"ContainerStarted","Data":"dc9c8b0bab7c4c14822f82ab069a3a669d2138e2b8943d5f0d9a26d0e3cd839d"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.857368 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" event={"ID":"ce2700f0-be98-4622-a729-99b696e7ecb9","Type":"ContainerStarted","Data":"9956dae47ebf3cfc27084bd586108f51b69f1d6b37234d8f4e2bd7225ec078e8"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.858235 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" event={"ID":"78c06cb9-944c-4f1c-a705-a7264c1c87ff","Type":"ContainerStarted","Data":"8d4ad34a08ca172d0eee91ac2e4c170f7d54c5cdb2e0a299d5c223595107be5d"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.859098 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" event={"ID":"8a68474e-6d7c-4b29-89e0-90d25dbbf86e","Type":"ContainerStarted","Data":"fb6f0ac4289b3dc5d7ce59678afe6cc960be3efb203b4c3ffc5c4ab1f9170ddf"} Feb 27 08:32:00 crc 
kubenswrapper[4906]: I0227 08:32:00.860537 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" event={"ID":"16f8b49f-8dc0-4aa5-bb80-9642e2688141","Type":"ContainerStarted","Data":"85681be5202b3c2fd273a7be742c8372b6f5d73309e92a30bfb7599c5e4abef7"} Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.861840 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.884676 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rl9nk\" (UniqueName: \"kubernetes.io/projected/db9e745a-c371-4b93-91ba-f755f3d4929d-kube-api-access-rl9nk\") pod \"router-default-5444994796-bm27t\" (UID: \"db9e745a-c371-4b93-91ba-f755f3d4929d\") " pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.900695 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88g4m\" (UniqueName: \"kubernetes.io/projected/1f4e9433-9a0f-4432-bfbe-b98fe89961bd-kube-api-access-88g4m\") pod \"csi-hostpathplugin-rzl5p\" (UID: \"1f4e9433-9a0f-4432-bfbe-b98fe89961bd\") " pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.910945 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/144c52bf-12d2-4b24-baf9-f8f2b0a587e1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-rk76z\" (UID: \"144c52bf-12d2-4b24-baf9-f8f2b0a587e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.916178 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:00 crc kubenswrapper[4906]: E0227 08:32:00.917701 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.417680345 +0000 UTC m=+219.812081955 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.934231 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.952487 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5mct\" (UniqueName: \"kubernetes.io/projected/8be167c5-1053-42f7-ae0a-11b4fc0d8333-kube-api-access-q5mct\") pod \"olm-operator-6b444d44fb-s8vkh\" (UID: \"8be167c5-1053-42f7-ae0a-11b4fc0d8333\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.964508 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vksxh\" (UniqueName: \"kubernetes.io/projected/9c399ec5-479b-4e68-b556-691d6b9dd26f-kube-api-access-vksxh\") pod \"package-server-manager-789f6589d5-mmsmb\" (UID: \"9c399ec5-479b-4e68-b556-691d6b9dd26f\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.978907 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9vqm\" (UniqueName: \"kubernetes.io/projected/c1d09410-c54c-478a-a350-58f3310099af-kube-api-access-n9vqm\") pod \"machine-config-server-nd8mn\" (UID: \"c1d09410-c54c-478a-a350-58f3310099af\") " pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:00 crc kubenswrapper[4906]: W0227 08:32:00.983364 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb9e745a_c371_4b93_91ba_f755f3d4929d.slice/crio-4cb2e627e9fa6dee510a53fc2e1e1a0713f07dca49bd4d157da6dfdb77175ec2 WatchSource:0}: Error finding container 4cb2e627e9fa6dee510a53fc2e1e1a0713f07dca49bd4d157da6dfdb77175ec2: Status 404 returned error can't find the container with id 4cb2e627e9fa6dee510a53fc2e1e1a0713f07dca49bd4d157da6dfdb77175ec2 Feb 27 08:32:00 crc kubenswrapper[4906]: I0227 08:32:00.984019 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkdqg\" (UniqueName: \"kubernetes.io/projected/ffd1a145-703e-452a-b666-41c9c837985d-kube-api-access-kkdqg\") pod \"ingress-canary-vkqzc\" (UID: \"ffd1a145-703e-452a-b666-41c9c837985d\") " pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.002586 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hl4cc\" (UniqueName: \"kubernetes.io/projected/dbd83c42-7c7f-4257-9105-58f2e2fae841-kube-api-access-hl4cc\") pod \"kube-storage-version-migrator-operator-b67b599dd-wvwrb\" (UID: \"dbd83c42-7c7f-4257-9105-58f2e2fae841\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.009190 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.014110 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.018345 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.018803 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.518787488 +0000 UTC m=+219.913189098 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.028925 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhvpx\" (UniqueName: \"kubernetes.io/projected/d217f1ee-f917-4ac1-bf8d-c8a011d42ebc-kube-api-access-fhvpx\") pod \"control-plane-machine-set-operator-78cbb6b69f-p78ck\" (UID: \"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.042035 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkntp\" (UniqueName: \"kubernetes.io/projected/75b3451d-2451-4a02-a510-fa27d6ea841a-kube-api-access-zkntp\") pod \"multus-admission-controller-857f4d67dd-q826k\" (UID: \"75b3451d-2451-4a02-a510-fa27d6ea841a\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.064259 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zhhwt\" (UniqueName: \"kubernetes.io/projected/582fc06a-0d1d-4260-a91f-af317ab278d9-kube-api-access-zhhwt\") pod \"auto-csr-approver-29536352-8fqhc\" (UID: \"582fc06a-0d1d-4260-a91f-af317ab278d9\") " pod="openshift-infra/auto-csr-approver-29536352-8fqhc" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.070136 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.076059 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.079931 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-nd8mn" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.084743 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x6nx\" (UniqueName: \"kubernetes.io/projected/ce4d5219-eab9-4fda-bf96-5a7ef9056300-kube-api-access-2x6nx\") pod \"dns-default-hhgws\" (UID: \"ce4d5219-eab9-4fda-bf96-5a7ef9056300\") " pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.095715 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:01 crc kubenswrapper[4906]: W0227 08:32:01.103090 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod13cab0b5_01fb_4f0f_9fc7_cc5a6e2ad850.slice/crio-6ee60424781291b48a7684915ef23d4258022da08bb2a0a2ee4971c089e05ed7 WatchSource:0}: Error finding container 6ee60424781291b48a7684915ef23d4258022da08bb2a0a2ee4971c089e05ed7: Status 404 returned error can't find the container with id 6ee60424781291b48a7684915ef23d4258022da08bb2a0a2ee4971c089e05ed7 Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.105083 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vkqzc" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.107913 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmd5p\" (UniqueName: \"kubernetes.io/projected/f5b83410-ebfb-4bd0-a32e-b51029bd42cf-kube-api-access-rmd5p\") pod \"packageserver-d55dfcdfc-4655n\" (UID: \"f5b83410-ebfb-4bd0-a32e-b51029bd42cf\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.118346 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.119621 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.119837 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.619802739 +0000 UTC m=+220.014204349 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.120124 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.120478 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.620464467 +0000 UTC m=+220.014866077 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.123552 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.123701 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7vz4p\" (UniqueName: \"kubernetes.io/projected/6357283f-dd75-4101-8a3c-24fd81f0a991-kube-api-access-7vz4p\") pod \"service-ca-operator-777779d784-dn92s\" (UID: \"6357283f-dd75-4101-8a3c-24fd81f0a991\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.148294 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtblc\" (UniqueName: \"kubernetes.io/projected/0f7a799f-448a-414c-b8a0-8bd5f016e5c1-kube-api-access-rtblc\") pod \"machine-config-operator-74547568cd-9jcn2\" (UID: \"0f7a799f-448a-414c-b8a0-8bd5f016e5c1\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.168839 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sgwc\" (UniqueName: \"kubernetes.io/projected/ac646565-cd5f-405a-ad92-3f2afb51d5c8-kube-api-access-9sgwc\") pod \"collect-profiles-29536350-kfpvx\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.172494 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.172800 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.181442 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.191687 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/917d272c-e31b-420f-9408-06f7130b76ba-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-dkx2c\" (UID: \"917d272c-e31b-420f-9408-06f7130b76ba\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.195265 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.209070 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r9z98\" (UniqueName: \"kubernetes.io/projected/739fb53a-c353-4113-9f21-062b6580a184-kube-api-access-r9z98\") pod \"marketplace-operator-79b997595-pqnpm\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.209651 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.221739 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.222474 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.722445015 +0000 UTC m=+220.116846625 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.222557 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.223026 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.723019232 +0000 UTC m=+220.117420842 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.230904 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbtgj\" (UniqueName: \"kubernetes.io/projected/20abae03-4ab8-4e0e-91f2-f4ec1bfc9529-kube-api-access-tbtgj\") pod \"catalog-operator-68c6474976-m8zqh\" (UID: \"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.249238 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.264601 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m89v8\" (UniqueName: \"kubernetes.io/projected/7f5ee13e-99ff-44b5-a302-dcbaf840fe79-kube-api-access-m89v8\") pod \"service-ca-9c57cc56f-b6rvf\" (UID: \"7f5ee13e-99ff-44b5-a302-dcbaf840fe79\") " pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.265156 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.271232 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.279259 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.279523 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.280146 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqvpj\" (UniqueName: \"kubernetes.io/projected/74d50313-482b-4d26-acd9-ce88a2c53093-kube-api-access-pqvpj\") pod \"machine-config-controller-84d6567774-ppcv6\" (UID: \"74d50313-482b-4d26-acd9-ce88a2c53093\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.287260 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.296357 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.314175 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9wgc\" (UniqueName: \"kubernetes.io/projected/f743212c-ed63-408e-8063-ed04c8a7a1a9-kube-api-access-x9wgc\") pod \"auto-csr-approver-29536350-85r2j\" (UID: \"f743212c-ed63-408e-8063-ed04c8a7a1a9\") " pod="openshift-infra/auto-csr-approver-29536350-85r2j" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.324458 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.324634 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.325155 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.825130504 +0000 UTC m=+220.219532124 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.337335 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xx8qm"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.339237 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.348570 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536350-85r2j" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.361125 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.366641 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.428604 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.429237 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:01.929031406 +0000 UTC m=+220.323433026 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.493080 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.524712 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.534142 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.534532 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.03436383 +0000 UTC m=+220.428765440 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.535072 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.535803 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.03579137 +0000 UTC m=+220.430192970 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.585414 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-rzl5p"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.625720 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-hhgws"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.644331 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.644771 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.144746037 +0000 UTC m=+220.539147647 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.657307 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.683956 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.737150 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-hkv6l"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.747649 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.748131 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.248112785 +0000 UTC m=+220.642514395 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: W0227 08:32:01.751701 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef6057ac_2815_4543_ba7c_ae864cc3a830.slice/crio-2433b87cab90b77b86d94b312f10273b91474e50d6fdbab583ce65bbe8b5741a WatchSource:0}: Error finding container 2433b87cab90b77b86d94b312f10273b91474e50d6fdbab583ce65bbe8b5741a: Status 404 returned error can't find the container with id 2433b87cab90b77b86d94b312f10273b91474e50d6fdbab583ce65bbe8b5741a Feb 27 08:32:01 crc kubenswrapper[4906]: W0227 08:32:01.755217 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05f137f2_dd6a_4dc0_b02d_31e9b81d2a8d.slice/crio-ef16214b42842e87b057a1ffde7a90592b7cbc5812db253b099722de991ce3c7 WatchSource:0}: Error finding container ef16214b42842e87b057a1ffde7a90592b7cbc5812db253b099722de991ce3c7: Status 404 returned error can't find the container with id ef16214b42842e87b057a1ffde7a90592b7cbc5812db253b099722de991ce3c7 Feb 27 08:32:01 crc kubenswrapper[4906]: W0227 08:32:01.755464 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c399ec5_479b_4e68_b556_691d6b9dd26f.slice/crio-d35c2c4f8f0393f37f07b5a74efeee92329a48d5561415a96ca600a15939a3e1 WatchSource:0}: Error finding container d35c2c4f8f0393f37f07b5a74efeee92329a48d5561415a96ca600a15939a3e1: Status 404 returned error can't find the container with id d35c2c4f8f0393f37f07b5a74efeee92329a48d5561415a96ca600a15939a3e1 Feb 27 08:32:01 crc kubenswrapper[4906]: W0227 08:32:01.778425 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8be167c5_1053_42f7_ae0a_11b4fc0d8333.slice/crio-3927bf979088af74f8372570caf7cb32ede6cb5764c48850cd5ba3da3ee9b862 WatchSource:0}: Error finding container 3927bf979088af74f8372570caf7cb32ede6cb5764c48850cd5ba3da3ee9b862: Status 404 returned error can't find the container with id 3927bf979088af74f8372570caf7cb32ede6cb5764c48850cd5ba3da3ee9b862 Feb 27 08:32:01 crc kubenswrapper[4906]: W0227 08:32:01.778775 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f4e9433_9a0f_4432_bfbe_b98fe89961bd.slice/crio-e30734b81cd65d044037338018b37c64ccbd7589e679e77cbf936c48b0eeb29c WatchSource:0}: Error finding container e30734b81cd65d044037338018b37c64ccbd7589e679e77cbf936c48b0eeb29c: Status 404 returned error can't find the container with id e30734b81cd65d044037338018b37c64ccbd7589e679e77cbf936c48b0eeb29c Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.848504 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 
27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.849530 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.349501686 +0000 UTC m=+220.743903296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.862426 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.870161 4906 generic.go:334] "Generic (PLEG): container finished" podID="543292c6-d79b-4198-932b-fdc68dcbbde5" containerID="2e04c3c69fb4333a0ec47aff4e5250567c10e17397b37eda13b866522448bac5" exitCode=0 Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.870238 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" event={"ID":"543292c6-d79b-4198-932b-fdc68dcbbde5","Type":"ContainerDied","Data":"2e04c3c69fb4333a0ec47aff4e5250567c10e17397b37eda13b866522448bac5"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.878121 4906 generic.go:334] "Generic (PLEG): container finished" podID="8a68474e-6d7c-4b29-89e0-90d25dbbf86e" containerID="a7c18c51ae9398c455c712a6b63d69c89b126b91d4de153e5773c31169176513" exitCode=0 Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.878209 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" event={"ID":"8a68474e-6d7c-4b29-89e0-90d25dbbf86e","Type":"ContainerDied","Data":"a7c18c51ae9398c455c712a6b63d69c89b126b91d4de153e5773c31169176513"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.881659 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" event={"ID":"1f4e9433-9a0f-4432-bfbe-b98fe89961bd","Type":"ContainerStarted","Data":"e30734b81cd65d044037338018b37c64ccbd7589e679e77cbf936c48b0eeb29c"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.884266 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nd8mn" event={"ID":"c1d09410-c54c-478a-a350-58f3310099af","Type":"ContainerStarted","Data":"08467d1a50ba6635ba4fd3363fd20c59a7a86b29c64c4a4ccc75682eb000c4f9"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.890767 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" event={"ID":"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850","Type":"ContainerStarted","Data":"9751a01173593c258d351eea93e1138d503394ac556f5da8885de295d0eeb26d"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.890832 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" 
event={"ID":"13cab0b5-01fb-4f0f-9fc7-cc5a6e2ad850","Type":"ContainerStarted","Data":"6ee60424781291b48a7684915ef23d4258022da08bb2a0a2ee4971c089e05ed7"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.895995 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" event={"ID":"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1","Type":"ContainerStarted","Data":"8225df8ad53912a3cae83f4f6c67a2a4ada1a3431ee1e581b97f9737f4ef9ea1"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.899997 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" event={"ID":"8be167c5-1053-42f7-ae0a-11b4fc0d8333","Type":"ContainerStarted","Data":"3927bf979088af74f8372570caf7cb32ede6cb5764c48850cd5ba3da3ee9b862"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.901324 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vkqzc"] Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.903325 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" event={"ID":"78c06cb9-944c-4f1c-a705-a7264c1c87ff","Type":"ContainerStarted","Data":"fa1db566fb22960d3b9748da84c54af5af65d448e41a349acb2b7054b94dbf09"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.933898 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-bm27t" event={"ID":"db9e745a-c371-4b93-91ba-f755f3d4929d","Type":"ContainerStarted","Data":"4a7a447e3c0e893ed415680c2ba5466ef805c6454ea1082009f167014730eb58"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.933953 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-bm27t" event={"ID":"db9e745a-c371-4b93-91ba-f755f3d4929d","Type":"ContainerStarted","Data":"4cb2e627e9fa6dee510a53fc2e1e1a0713f07dca49bd4d157da6dfdb77175ec2"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.938702 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.941431 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.941479 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.951228 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:01 crc kubenswrapper[4906]: E0227 08:32:01.954343 4906 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.454324155 +0000 UTC m=+220.848725765 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.958163 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" event={"ID":"4ceb7b65-ef46-4eb8-a129-d462e1989488","Type":"ContainerStarted","Data":"a19ec7cc9afb707ad8e9744b0f1707681f5e532c4caf26e6140c33fe80502671"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.971721 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dq899" event={"ID":"09bfe857-0a67-4902-a9b9-5738a2074657","Type":"ContainerStarted","Data":"a21f59711cac35e61f93511b9771dc5fc1c2dd5c2baea527b2e668f171f2460f"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.971779 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-dq899" event={"ID":"09bfe857-0a67-4902-a9b9-5738a2074657","Type":"ContainerStarted","Data":"0970c8bb145bcad19a030f4d95e7b249253d7439e6fd2bdbdf2ba27d677e060c"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.973044 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.982535 4906 generic.go:334] "Generic (PLEG): container finished" podID="fd02452f-1a87-43a5-8f32-3b63d9f522e3" containerID="1e9c619c622fea428972dfbe535081f047b892835bb9e6edbf1b3fcd9e9b1ae2" exitCode=0 Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.982629 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" event={"ID":"fd02452f-1a87-43a5-8f32-3b63d9f522e3","Type":"ContainerDied","Data":"1e9c619c622fea428972dfbe535081f047b892835bb9e6edbf1b3fcd9e9b1ae2"} Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.997278 4906 patch_prober.go:28] interesting pod/console-operator-58897d9998-dq899 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Feb 27 08:32:01 crc kubenswrapper[4906]: I0227 08:32:01.997342 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dq899" podUID="09bfe857-0a67-4902-a9b9-5738a2074657" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.053232 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.053803 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.553771621 +0000 UTC m=+220.948173231 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.054198 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hhgws" event={"ID":"ce4d5219-eab9-4fda-bf96-5a7ef9056300","Type":"ContainerStarted","Data":"31dc3fe196f2f6a040b89bdaaca376a3c759559d98d405fff97083d9417a8166"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.055248 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.061867 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.561547702 +0000 UTC m=+220.955949312 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.074046 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.093102 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" event={"ID":"16f8b49f-8dc0-4aa5-bb80-9642e2688141","Type":"ContainerStarted","Data":"87e8d6ca5790f84a46b1ed5fb86296da2415381f345342bb3f4972657ab7b351"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.099610 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hkv6l" event={"ID":"f3cc181a-c108-493a-87fa-9bf76f81b062","Type":"ContainerStarted","Data":"0c32616aba63d56ea86836eada326275ac696c1dc3d3142cc72d20890580df48"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.107088 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" event={"ID":"390464d8-fc1d-443a-85f7-7164ac4e2d05","Type":"ContainerStarted","Data":"98cfc4860b8cf1fb3a8d786f9622628bd2afee5c5a331adcaebbd8356e896bec"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.107714 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.109949 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536352-8fqhc"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.117138 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.118410 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" event={"ID":"144c52bf-12d2-4b24-baf9-f8f2b0a587e1","Type":"ContainerStarted","Data":"40a4075adf5278210c01cdeba34ef1868a2b9219b468770410729698333fb551"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.123388 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" event={"ID":"d8e9a89a-0411-4456-9637-08712b004662","Type":"ContainerStarted","Data":"70be84e3d1d6ae7f1e57ee105fc054ab5aea084765f1b6ae067b22cfac2b18dd"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.135570 4906 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qvd5m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.135658 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" 
containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.150466 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" event={"ID":"05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d","Type":"ContainerStarted","Data":"ef16214b42842e87b057a1ffde7a90592b7cbc5812db253b099722de991ce3c7"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.157194 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.157461 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.657438647 +0000 UTC m=+221.051840257 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.157580 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.159589 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.659565647 +0000 UTC m=+221.053967297 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.167581 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q826k"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.171858 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" event={"ID":"9c399ec5-479b-4e68-b556-691d6b9dd26f","Type":"ContainerStarted","Data":"d35c2c4f8f0393f37f07b5a74efeee92329a48d5561415a96ca600a15939a3e1"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.183539 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" event={"ID":"ce294532-f4f2-46cf-b046-e81aa81f0f0c","Type":"ContainerStarted","Data":"d0973c53ba5bfa14d32d9f8284bd122b2a24049336996200d3043266a59c914a"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.199139 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xx8qm" event={"ID":"a861f5dc-100c-443f-ab72-ecfe71895998","Type":"ContainerStarted","Data":"a17b7c6f779e5df4e396835875b24755438bba43b8c32d643f837c06879256ac"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.211921 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" event={"ID":"ef6057ac-2815-4543-ba7c-ae864cc3a830","Type":"ContainerStarted","Data":"2433b87cab90b77b86d94b312f10273b91474e50d6fdbab583ce65bbe8b5741a"} Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.213163 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.217460 4906 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-lz5rn container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" start-of-body= Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.217814 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" podUID="0f8b1512-2590-418e-8504-70ef3c1567b0" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.16:6443/healthz\": dial tcp 10.217.0.16:6443: connect: connection refused" Feb 27 08:32:02 crc kubenswrapper[4906]: W0227 08:32:02.229067 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod582fc06a_0d1d_4260_a91f_af317ab278d9.slice/crio-babe2b86473c7e1080573fe64cab36fa7e500588ae62c608abf02503e207c883 WatchSource:0}: Error finding container babe2b86473c7e1080573fe64cab36fa7e500588ae62c608abf02503e207c883: Status 404 returned error can't find the container with id babe2b86473c7e1080573fe64cab36fa7e500588ae62c608abf02503e207c883 Feb 27 08:32:02 crc kubenswrapper[4906]: 
I0227 08:32:02.258441 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.258681 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.758639982 +0000 UTC m=+221.153041592 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.259010 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.259272 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.259953 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.75994101 +0000 UTC m=+221.154342610 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.281310 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.363192 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.363681 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-27 08:32:02.863655397 +0000 UTC m=+221.258057007 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.465361 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.465747 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.465788 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.466401 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:02.966376426 +0000 UTC m=+221.360778096 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.476999 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.501209 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.543146 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pqnpm"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.570253 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.571302 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.071263877 +0000 UTC m=+221.465665497 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.571620 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.571679 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.571762 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.571817 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.580198 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.080176091 +0000 UTC m=+221.474577701 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.580326 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-l9pjw" podStartSLOduration=174.580294234 podStartE2EDuration="2m54.580294234s" podCreationTimestamp="2026-02-27 08:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:02.551520384 +0000 UTC m=+220.945921994" watchObservedRunningTime="2026-02-27 08:32:02.580294234 +0000 UTC m=+220.974695844" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.582771 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.584826 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9bb2ded7-f8fe-4978-81cd-08cafe0fe124-metrics-certs\") pod \"network-metrics-daemon-6rvgh\" (UID: \"9bb2ded7-f8fe-4978-81cd-08cafe0fe124\") " pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.597930 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.643828 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dn92s"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.680873 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.682046 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.682496 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.182466218 +0000 UTC m=+221.576867978 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.693557 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.696818 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.698310 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.714258 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.722332 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.733746 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-b6rvf"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.761238 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.774463 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-6rvgh" Feb 27 08:32:02 crc kubenswrapper[4906]: W0227 08:32:02.790695 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod739fb53a_c353_4113_9f21_062b6580a184.slice/crio-957c2df71b8400755f9a57762177d510ed93466d7a4e76a3f0c1dd534e967206 WatchSource:0}: Error finding container 957c2df71b8400755f9a57762177d510ed93466d7a4e76a3f0c1dd534e967206: Status 404 returned error can't find the container with id 957c2df71b8400755f9a57762177d510ed93466d7a4e76a3f0c1dd534e967206 Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.793404 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.796226 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.296207821 +0000 UTC m=+221.690609431 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: W0227 08:32:02.798000 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6357283f_dd75_4101_8a3c_24fd81f0a991.slice/crio-95dee3d6733d6c7c0fdb7c61b8c9755c2a79980f1fe166af3b041c7ce78c2403 WatchSource:0}: Error finding container 95dee3d6733d6c7c0fdb7c61b8c9755c2a79980f1fe166af3b041c7ce78c2403: Status 404 returned error can't find the container with id 95dee3d6733d6c7c0fdb7c61b8c9755c2a79980f1fe166af3b041c7ce78c2403 Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.803294 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536350-85r2j"] Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.894445 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:02 crc kubenswrapper[4906]: E0227 08:32:02.895004 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.394982848 +0000 UTC m=+221.789384458 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:02 crc kubenswrapper[4906]: W0227 08:32:02.916580 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod20abae03_4ab8_4e0e_91f2_f4ec1bfc9529.slice/crio-66c3df429c817810b5e850d7f5e9e1d9eabbc9a494a9398778ce96990e484184 WatchSource:0}: Error finding container 66c3df429c817810b5e850d7f5e9e1d9eabbc9a494a9398778ce96990e484184: Status 404 returned error can't find the container with id 66c3df429c817810b5e850d7f5e9e1d9eabbc9a494a9398778ce96990e484184 Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.937485 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Feb 27 08:32:02 crc kubenswrapper[4906]: I0227 08:32:02.938217 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.005179 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:03 crc kubenswrapper[4906]: W0227 08:32:03.006863 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf743212c_ed63_408e_8063_ed04c8a7a1a9.slice/crio-80d44b9ef76d050e594c6e683d32cf61ac3d34056278a78e82f4ffdba78def70 WatchSource:0}: Error finding container 80d44b9ef76d050e594c6e683d32cf61ac3d34056278a78e82f4ffdba78def70: Status 404 returned error can't find the container with id 80d44b9ef76d050e594c6e683d32cf61ac3d34056278a78e82f4ffdba78def70 Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.006902 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.506862958 +0000 UTC m=+221.901264578 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.020388 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-925p5" podStartSLOduration=175.020370003 podStartE2EDuration="2m55.020370003s" podCreationTimestamp="2026-02-27 08:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:02.981618728 +0000 UTC m=+221.376020338" watchObservedRunningTime="2026-02-27 08:32:03.020370003 +0000 UTC m=+221.414771613" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.022422 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" podStartSLOduration=175.022413302 podStartE2EDuration="2m55.022413302s" podCreationTimestamp="2026-02-27 08:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.019697714 +0000 UTC m=+221.414099334" watchObservedRunningTime="2026-02-27 08:32:03.022413302 +0000 UTC m=+221.416814912" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.055803 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-knttz" podStartSLOduration=174.055781503 podStartE2EDuration="2m54.055781503s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.052406937 +0000 UTC m=+221.446808547" watchObservedRunningTime="2026-02-27 08:32:03.055781503 +0000 UTC m=+221.450183113" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.106761 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.108082 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.608059664 +0000 UTC m=+222.002461274 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.113813 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" podStartSLOduration=174.113783867 podStartE2EDuration="2m54.113783867s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.081083664 +0000 UTC m=+221.475485284" watchObservedRunningTime="2026-02-27 08:32:03.113783867 +0000 UTC m=+221.508185477" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.191909 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-bm27t" podStartSLOduration=174.191862573 podStartE2EDuration="2m54.191862573s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.141153057 +0000 UTC m=+221.535554677" watchObservedRunningTime="2026-02-27 08:32:03.191862573 +0000 UTC m=+221.586264183" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.213479 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.214671 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.714624402 +0000 UTC m=+222.109026012 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.245283 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" event={"ID":"8be167c5-1053-42f7-ae0a-11b4fc0d8333","Type":"ContainerStarted","Data":"934769a6a14325f5eebc7e228c81b3ee5cedac3c4a2cb3c2c553048a1dc43566"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.246652 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.249002 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" event={"ID":"05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d","Type":"ContainerStarted","Data":"be2ab74f1f4199140f67908a481af852ebbbf8bfe4ff697be4cf9096731e0724"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.250912 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" event={"ID":"75b3451d-2451-4a02-a510-fa27d6ea841a","Type":"ContainerStarted","Data":"1c21671a9ae19235a192b499b9d88a427cb57d97ea88ae52e4ec880995716017"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.254509 4906 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-s8vkh container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" start-of-body= Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.254573 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" podUID="8be167c5-1053-42f7-ae0a-11b4fc0d8333" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.37:8443/healthz\": dial tcp 10.217.0.37:8443: connect: connection refused" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.273300 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" event={"ID":"6357283f-dd75-4101-8a3c-24fd81f0a991","Type":"ContainerStarted","Data":"95dee3d6733d6c7c0fdb7c61b8c9755c2a79980f1fe166af3b041c7ce78c2403"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.279561 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-6lqtc" podStartSLOduration=174.279534893 podStartE2EDuration="2m54.279534893s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.277607928 +0000 UTC m=+221.672009538" watchObservedRunningTime="2026-02-27 08:32:03.279534893 +0000 UTC m=+221.673936503" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.304470 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-tns4l" podStartSLOduration=174.304439403 podStartE2EDuration="2m54.304439403s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.302674613 +0000 UTC m=+221.697076243" watchObservedRunningTime="2026-02-27 08:32:03.304439403 +0000 UTC m=+221.698841033" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.322141 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.323198 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.823163727 +0000 UTC m=+222.217565327 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.325090 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-nd8mn" event={"ID":"c1d09410-c54c-478a-a350-58f3310099af","Type":"ContainerStarted","Data":"6ee5b8fa3055442818739626adb855314820ce8e139382c5f579873324c51483"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.345055 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-dq899" podStartSLOduration=174.345040241 podStartE2EDuration="2m54.345040241s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.343635661 +0000 UTC m=+221.738037271" watchObservedRunningTime="2026-02-27 08:32:03.345040241 +0000 UTC m=+221.739441851" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.382604 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-nd8mn" podStartSLOduration=5.382582652 podStartE2EDuration="5.382582652s" podCreationTimestamp="2026-02-27 08:31:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.378932168 +0000 UTC m=+221.773333788" watchObservedRunningTime="2026-02-27 08:32:03.382582652 +0000 UTC m=+221.776984262" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.394607 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536350-85r2j" 
event={"ID":"f743212c-ed63-408e-8063-ed04c8a7a1a9","Type":"ContainerStarted","Data":"80d44b9ef76d050e594c6e683d32cf61ac3d34056278a78e82f4ffdba78def70"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.403050 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" event={"ID":"ce2700f0-be98-4622-a729-99b696e7ecb9","Type":"ContainerStarted","Data":"4ab0bbc556da5eb4bf5f39ee9c6709016d651b12ea4907a8fab34b80b64f8e7d"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.425949 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" event={"ID":"739fb53a-c353-4113-9f21-062b6580a184","Type":"ContainerStarted","Data":"957c2df71b8400755f9a57762177d510ed93466d7a4e76a3f0c1dd534e967206"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.426820 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.427210 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:03.927197174 +0000 UTC m=+222.321598784 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.433195 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" event={"ID":"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc","Type":"ContainerStarted","Data":"a7899dab24f0f4eb31bf33ffd3ef7768b33f466d0faebd599dbe7cf0a3c9af6f"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.457154 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" event={"ID":"dbd83c42-7c7f-4257-9105-58f2e2fae841","Type":"ContainerStarted","Data":"b289927f21434eb9398a567a934ac4ee05a8f387764511b22b3a568f3eb36e3f"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.458578 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" podStartSLOduration=174.458546138 podStartE2EDuration="2m54.458546138s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.455868501 +0000 UTC m=+221.850270111" watchObservedRunningTime="2026-02-27 08:32:03.458546138 +0000 UTC m=+221.852947748" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.469167 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" event={"ID":"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529","Type":"ContainerStarted","Data":"66c3df429c817810b5e850d7f5e9e1d9eabbc9a494a9398778ce96990e484184"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.495659 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" event={"ID":"ce294532-f4f2-46cf-b046-e81aa81f0f0c","Type":"ContainerStarted","Data":"12dbd85d430ccbe729daf47ac409c222373dd66161ee315da2c7a38ff2a62dfa"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.513559 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" event={"ID":"7f5ee13e-99ff-44b5-a302-dcbaf840fe79","Type":"ContainerStarted","Data":"e9fd22954ca180139d6a30972c8151c7e0ce28a317c3a0fb4b0f8818550a9737"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.525105 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vkqzc" event={"ID":"ffd1a145-703e-452a-b666-41c9c837985d","Type":"ContainerStarted","Data":"95cd573f9f939cf846a6a020247262ce4587fb30eb81f384d28b6523fa7e3e2e"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.529192 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.529349 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.029320786 +0000 UTC m=+222.423722396 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.529534 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.530710 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.030696105 +0000 UTC m=+222.425097715 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.557319 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" event={"ID":"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1","Type":"ContainerStarted","Data":"7ea88344978ed4c0d093ee26818dec61d08d55f2a28a95d570e7a97385254384"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.619792 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" event={"ID":"d8e9a89a-0411-4456-9637-08712b004662","Type":"ContainerStarted","Data":"d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.620268 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.630071 4906 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-b7qtg container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.630134 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" podUID="d8e9a89a-0411-4456-9637-08712b004662" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.633165 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.637674 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" event={"ID":"917d272c-e31b-420f-9408-06f7130b76ba","Type":"ContainerStarted","Data":"7fbd9b6e5ebc391f7fb4c38654ee75d4c7c14e4e452269a52c86d53c9282d9b7"} Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.638010 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.137986444 +0000 UTC m=+222.532388054 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.670010 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" event={"ID":"74d50313-482b-4d26-acd9-ce88a2c53093","Type":"ContainerStarted","Data":"9bbdea0460f0cb35842360e29aa82acfd8e63b32a2dabe6eb41a8e525ce43048"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.676258 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" event={"ID":"f5b83410-ebfb-4bd0-a32e-b51029bd42cf","Type":"ContainerStarted","Data":"f1a2b443964260de7219bacadd7c3b5ae1b3c12921acfc84a2d7d5814d3736e3"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.676326 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" event={"ID":"f5b83410-ebfb-4bd0-a32e-b51029bd42cf","Type":"ContainerStarted","Data":"6f57d5f07063fb51d64bcf92a6e2246bb105cba3029a2b075f52f9f6095579eb"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.677324 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.692141 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" event={"ID":"582fc06a-0d1d-4260-a91f-af317ab278d9","Type":"ContainerStarted","Data":"babe2b86473c7e1080573fe64cab36fa7e500588ae62c608abf02503e207c883"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.695306 4906 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-4655n container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:5443/healthz\": dial tcp 10.217.0.23:5443: connect: connection refused" start-of-body= Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.695590 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" podUID="f5b83410-ebfb-4bd0-a32e-b51029bd42cf" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.23:5443/healthz\": dial tcp 10.217.0.23:5443: connect: connection refused" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.706009 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" event={"ID":"ac646565-cd5f-405a-ad92-3f2afb51d5c8","Type":"ContainerStarted","Data":"d18be7cb7825e3c3141d7415901e36856b2af5f1a2f6afb7a3037bee3e70fa7c"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.709988 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" event={"ID":"0f7a799f-448a-414c-b8a0-8bd5f016e5c1","Type":"ContainerStarted","Data":"f7b44e8022f9f636b47d71d3d1f64938b791bdef05e827f1291d43433efe5df9"} Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.711492 4906 
patch_prober.go:28] interesting pod/console-operator-58897d9998-dq899 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.711560 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-dq899" podUID="09bfe857-0a67-4902-a9b9-5738a2074657" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.19:8443/readyz\": dial tcp 10.217.0.19:8443: connect: connection refused" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.711489 4906 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qvd5m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.711825 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.724674 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.747702 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.753205 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.253183579 +0000 UTC m=+222.647585249 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.860475 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.861129 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.361086606 +0000 UTC m=+222.755488216 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.861411 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.863629 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.363612958 +0000 UTC m=+222.758014568 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.897790 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-pb9n9" podStartSLOduration=175.897769732 podStartE2EDuration="2m55.897769732s" podCreationTimestamp="2026-02-27 08:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.886666886 +0000 UTC m=+222.281068516" watchObservedRunningTime="2026-02-27 08:32:03.897769732 +0000 UTC m=+222.292171342" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.954870 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:03 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:03 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:03 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.954967 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.962922 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:03 crc kubenswrapper[4906]: E0227 08:32:03.963439 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.463417044 +0000 UTC m=+222.857818654 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:03 crc kubenswrapper[4906]: I0227 08:32:03.985979 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-cr7bl" podStartSLOduration=174.985950137 podStartE2EDuration="2m54.985950137s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.985137043 +0000 UTC m=+222.379538643" watchObservedRunningTime="2026-02-27 08:32:03.985950137 +0000 UTC m=+222.380351757" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.006518 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vkqzc" podStartSLOduration=6.006489922 podStartE2EDuration="6.006489922s" podCreationTimestamp="2026-02-27 08:31:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:03.92609445 +0000 UTC m=+222.320496070" watchObservedRunningTime="2026-02-27 08:32:04.006489922 +0000 UTC m=+222.400891532" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.028283 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" podStartSLOduration=175.028260792 podStartE2EDuration="2m55.028260792s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.025485523 +0000 UTC m=+222.419887133" watchObservedRunningTime="2026-02-27 08:32:04.028260792 +0000 UTC m=+222.422662402" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.066531 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.066896 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.566856053 +0000 UTC m=+222.961257663 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.171412 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" podStartSLOduration=175.171388093 podStartE2EDuration="2m55.171388093s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.105632318 +0000 UTC m=+222.500033928" watchObservedRunningTime="2026-02-27 08:32:04.171388093 +0000 UTC m=+222.565789703" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.174412 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.175021 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.675001567 +0000 UTC m=+223.069403177 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.177934 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-r8pwh" podStartSLOduration=175.177907439 podStartE2EDuration="2m55.177907439s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.168974865 +0000 UTC m=+222.563376485" watchObservedRunningTime="2026-02-27 08:32:04.177907439 +0000 UTC m=+222.572309059" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.276784 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.277775 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.777756477 +0000 UTC m=+223.172158087 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.377708 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.378007 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.877986305 +0000 UTC m=+223.272387915 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.479793 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.480306 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:04.980291612 +0000 UTC m=+223.374693222 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.482230 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-6rvgh"] Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.582190 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.583104 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.083080823 +0000 UTC m=+223.477482443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.687701 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.688173 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.188156159 +0000 UTC m=+223.582557769 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.756109 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hkv6l" event={"ID":"f3cc181a-c108-493a-87fa-9bf76f81b062","Type":"ContainerStarted","Data":"bbeba7f0c4895b9188071d9fede0c5b5988e09953b165a0ea4235f4fd24250d3"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.757625 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.767407 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.767466 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.787300 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" event={"ID":"fd02452f-1a87-43a5-8f32-3b63d9f522e3","Type":"ContainerStarted","Data":"71f06323536e20061f58fdb98195747e7be06b8b6ff5f17f3d4a16a8b6b0242a"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.789179 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.789586 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.289564861 +0000 UTC m=+223.683966471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.807709 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-hkv6l" podStartSLOduration=175.807687468 podStartE2EDuration="2m55.807687468s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.806820243 +0000 UTC m=+223.201221863" watchObservedRunningTime="2026-02-27 08:32:04.807687468 +0000 UTC m=+223.202089078" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.812758 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"6c436505f468aade0557f1c470102e16d26f5861e0ae8613e6f281690381ed35"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.828577 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" event={"ID":"9c399ec5-479b-4e68-b556-691d6b9dd26f","Type":"ContainerStarted","Data":"485a9ef095d060bfc61d1a9c89d25058caebb6f798794efc7fb12598f6676ffe"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.831927 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" event={"ID":"75b3451d-2451-4a02-a510-fa27d6ea841a","Type":"ContainerStarted","Data":"4cd180dfc5602e3ee47d84f8c0343c36fe79e69c13bf63a582c499ca8b615665"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.834129 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hhgws" event={"ID":"ce4d5219-eab9-4fda-bf96-5a7ef9056300","Type":"ContainerStarted","Data":"b27fac33660835f4532ac4f0173361e789486be25c7f01749928d33d1819777e"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.835029 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1989c4191d79f56f453803ed1202c05a9d3c4627298779768b81cf7382ab2040"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.836150 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" 
event={"ID":"dbd83c42-7c7f-4257-9105-58f2e2fae841","Type":"ContainerStarted","Data":"cc99d0a76738ae1741adaf5bd9480ffdeedb51693d89dae642156f2346a27e7a"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.841568 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xx8qm" event={"ID":"a861f5dc-100c-443f-ab72-ecfe71895998","Type":"ContainerStarted","Data":"3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.848130 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" event={"ID":"144c52bf-12d2-4b24-baf9-f8f2b0a587e1","Type":"ContainerStarted","Data":"ae87a65cd2b7447bfa24cf4930c95c35fc404d5ca5a962b753eef4efcc179d5f"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.857950 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-wvwrb" podStartSLOduration=175.85792519 podStartE2EDuration="2m55.85792519s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.855539782 +0000 UTC m=+223.249941392" watchObservedRunningTime="2026-02-27 08:32:04.85792519 +0000 UTC m=+223.252326800" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.869053 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" event={"ID":"543292c6-d79b-4198-932b-fdc68dcbbde5","Type":"ContainerStarted","Data":"ba707f64a9e7ce799cbbd190c6a934f61ad05d00047404b9492a133a50589a2e"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.869947 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.874046 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" event={"ID":"8a68474e-6d7c-4b29-89e0-90d25dbbf86e","Type":"ContainerStarted","Data":"16086173a4f5352b4c11e0d57a9930e0d953ac2f19bbcd98b241c370d7ca628e"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.875870 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" event={"ID":"9bb2ded7-f8fe-4978-81cd-08cafe0fe124","Type":"ContainerStarted","Data":"98c5e9439ed4ba53040e3209c4b25449a74c9d8e14bf97d2b3f81d4fe8ce6d1b"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.877433 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" event={"ID":"16f8b49f-8dc0-4aa5-bb80-9642e2688141","Type":"ContainerStarted","Data":"4b98a6280a58454edb52411c945d511ee0597912b98913c57989f14945950947"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.879269 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" event={"ID":"ef6057ac-2815-4543-ba7c-ae864cc3a830","Type":"ContainerStarted","Data":"b6b5150efd0451845e6063b3716ff0f74378ab3db03404dc42ff37ba7139437d"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.880369 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" 
event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"503344a76b281b8feebacc3c4002223f83fe93f06e8eb49eba5862b4f4c2b584"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.881314 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vkqzc" event={"ID":"ffd1a145-703e-452a-b666-41c9c837985d","Type":"ContainerStarted","Data":"35df12e42587bd74227807267187834fff232157d6c7380af2290780a07e45c7"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.892671 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.894082 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" event={"ID":"84b6fd39-2c2b-41ef-aa4e-ebc67e829fc1","Type":"ContainerStarted","Data":"b23179220ac70ad0de9d6ad8f9b171e8f61f9bcd3fb0b83086cfe0ca36e92d39"} Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.892681 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-rk76z" podStartSLOduration=175.89264723 podStartE2EDuration="2m55.89264723s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.883075357 +0000 UTC m=+223.277476967" watchObservedRunningTime="2026-02-27 08:32:04.89264723 +0000 UTC m=+223.287048840" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.895610 4906 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qvd5m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.895751 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.896015 4906 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-4655n container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.23:5443/healthz\": dial tcp 10.217.0.23:5443: connect: connection refused" start-of-body= Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.896073 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" podUID="f5b83410-ebfb-4bd0-a32e-b51029bd42cf" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.23:5443/healthz\": dial tcp 10.217.0.23:5443: connect: connection refused" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.896400 4906 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-b7qtg 
container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.896513 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.3964973 +0000 UTC m=+223.790898910 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.896606 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" podUID="d8e9a89a-0411-4456-9637-08712b004662" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.904228 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-s8vkh" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.919531 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-xx8qm" podStartSLOduration=175.919509206 podStartE2EDuration="2m55.919509206s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.918108586 +0000 UTC m=+223.312510196" watchObservedRunningTime="2026-02-27 08:32:04.919509206 +0000 UTC m=+223.313910816" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.939183 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-2vdrh" podStartSLOduration=175.939163927 podStartE2EDuration="2m55.939163927s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.939017632 +0000 UTC m=+223.333419262" watchObservedRunningTime="2026-02-27 08:32:04.939163927 +0000 UTC m=+223.333565537" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.948239 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:04 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:04 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:04 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.948700 4906 prober.go:107] "Probe failed" probeType="Startup" 
pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.994188 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:04 crc kubenswrapper[4906]: E0227 08:32:04.997095 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.497054447 +0000 UTC m=+223.891456237 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:04 crc kubenswrapper[4906]: I0227 08:32:04.999174 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-r4ppj" podStartSLOduration=175.999151947 podStartE2EDuration="2m55.999151947s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:04.997622494 +0000 UTC m=+223.392024094" watchObservedRunningTime="2026-02-27 08:32:04.999151947 +0000 UTC m=+223.393553557" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.057793 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" podStartSLOduration=176.057773429 podStartE2EDuration="2m56.057773429s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:05.02975163 +0000 UTC m=+223.424153250" watchObservedRunningTime="2026-02-27 08:32:05.057773429 +0000 UTC m=+223.452175039" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.058240 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" podStartSLOduration=176.058233372 podStartE2EDuration="2m56.058233372s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:05.054444084 +0000 UTC m=+223.448845704" watchObservedRunningTime="2026-02-27 08:32:05.058233372 +0000 UTC m=+223.452634982" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.097231 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" 
(UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.097591 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.597576854 +0000 UTC m=+223.991978464 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.191039 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50260: no serving certificate available for the kubelet" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.198542 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.198761 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.698722788 +0000 UTC m=+224.093124408 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.199287 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.199751 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.699739167 +0000 UTC m=+224.094140777 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.269450 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50262: no serving certificate available for the kubelet" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.300230 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.300363 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.800336315 +0000 UTC m=+224.194737925 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.300608 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.301093 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.801081157 +0000 UTC m=+224.195482767 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.380206 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50264: no serving certificate available for the kubelet" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.402966 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.403414 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:05.903394254 +0000 UTC m=+224.297795864 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.474424 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50266: no serving certificate available for the kubelet" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.505918 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.506367 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.00635283 +0000 UTC m=+224.400754440 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.507630 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50268: no serving certificate available for the kubelet" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.585409 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50282: no serving certificate available for the kubelet" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.607001 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.607190 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.107148594 +0000 UTC m=+224.501550214 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.607344 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.607749 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.107737731 +0000 UTC m=+224.502139341 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.710162 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.710784 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.210762359 +0000 UTC m=+224.605163969 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.792496 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50292: no serving certificate available for the kubelet" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.812657 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.813097 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.313081317 +0000 UTC m=+224.707482927 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.914235 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:05 crc kubenswrapper[4906]: E0227 08:32:05.914780 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.414754096 +0000 UTC m=+224.809155706 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.918454 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" event={"ID":"917d272c-e31b-420f-9408-06f7130b76ba","Type":"ContainerStarted","Data":"5a468f0498c7496425de9ed5266aa78fc54b0bd537ceba96259c73fc4ed0476b"} Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.937560 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" event={"ID":"74d50313-482b-4d26-acd9-ce88a2c53093","Type":"ContainerStarted","Data":"bbf82ba78af49aff6c3747a3f65298a1902fb00015eee09185f8e3f347364a18"} Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.944148 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:05 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:05 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:05 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.944262 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.951197 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" 
event={"ID":"d217f1ee-f917-4ac1-bf8d-c8a011d42ebc","Type":"ContainerStarted","Data":"4fc3baf8bebbe25b9eafb5374bd005cb876fabc46a0209e7c44bf28be51a8dd5"} Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.966262 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-dkx2c" podStartSLOduration=176.966237774 podStartE2EDuration="2m56.966237774s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:05.963760013 +0000 UTC m=+224.358161633" watchObservedRunningTime="2026-02-27 08:32:05.966237774 +0000 UTC m=+224.360639384" Feb 27 08:32:05 crc kubenswrapper[4906]: I0227 08:32:05.988580 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" event={"ID":"9bb2ded7-f8fe-4978-81cd-08cafe0fe124","Type":"ContainerStarted","Data":"199c1ddbf3bc95fa7bab90c061e7daf092c9191693583d3b113bddfdf0279d1c"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.004299 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-p78ck" podStartSLOduration=177.004280649 podStartE2EDuration="2m57.004280649s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:05.998328579 +0000 UTC m=+224.392730189" watchObservedRunningTime="2026-02-27 08:32:06.004280649 +0000 UTC m=+224.398682259" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.012403 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"64b97ec761b3ba2a50e121364e4e02882b98d498caf59b73a2ff686b0e50483d"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.016286 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.017411 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.517383972 +0000 UTC m=+224.911785612 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.053818 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" event={"ID":"05f137f2-dd6a-4dc0-b02d-31e9b81d2a8d","Type":"ContainerStarted","Data":"a76051da25284a13b53f2f868c260915a4c102168b3174dc514ce289842af964"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.056635 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" event={"ID":"ac646565-cd5f-405a-ad92-3f2afb51d5c8","Type":"ContainerStarted","Data":"518463c7d5f4d22b76c4a2479bc7d8eb22106a8f02cce6f686e3bdb0081c951a"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.061277 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"019d2b6bf6205e59917341377b84dded7a000368758a332f1fd0cbad4dfda2fc"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.075839 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" event={"ID":"0f7a799f-448a-414c-b8a0-8bd5f016e5c1","Type":"ContainerStarted","Data":"5697a6714707402b935251377e3142cf8948b15b7e108e98384b7de066483aac"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.075918 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" event={"ID":"0f7a799f-448a-414c-b8a0-8bd5f016e5c1","Type":"ContainerStarted","Data":"ad2437406bfddcc106c045c95aea902f5371a5f9c395ed258e545613ab65d431"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.080930 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-79trw" podStartSLOduration=177.080901283 podStartE2EDuration="2m57.080901283s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.075127189 +0000 UTC m=+224.469528799" watchObservedRunningTime="2026-02-27 08:32:06.080901283 +0000 UTC m=+224.475302913" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.084305 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" event={"ID":"20abae03-4ab8-4e0e-91f2-f4ec1bfc9529","Type":"ContainerStarted","Data":"20e04aaeade2a520326fbdbf9942a36820abb11b43e04d02768f4b76badace87"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.085366 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.092288 4906 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-m8zqh container/catalog-operator 
namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.092353 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" podUID="20abae03-4ab8-4e0e-91f2-f4ec1bfc9529" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.102663 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" podStartSLOduration=126.102641193 podStartE2EDuration="2m6.102641193s" podCreationTimestamp="2026-02-27 08:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.09550517 +0000 UTC m=+224.489906780" watchObservedRunningTime="2026-02-27 08:32:06.102641193 +0000 UTC m=+224.497042803" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.102717 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"fd0edf8af7407e3ba63d45993e7bd9a3e6131e6dc172e2eeb280225309628351"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.102802 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.118196 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.118604 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.618576228 +0000 UTC m=+225.012977848 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.118693 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.120473 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.620459731 +0000 UTC m=+225.014861441 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.124246 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" event={"ID":"6357283f-dd75-4101-8a3c-24fd81f0a991","Type":"ContainerStarted","Data":"a2b0b37b15e18ec2a17c363f05ff251a4227131f60b87c1f045e3e954214237a"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.129974 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" event={"ID":"7f5ee13e-99ff-44b5-a302-dcbaf840fe79","Type":"ContainerStarted","Data":"a5b127ae6cb218240cc58ee39cbb5cc4117453e67fee638f3f749a107d2fcd86"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.137762 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" event={"ID":"75b3451d-2451-4a02-a510-fa27d6ea841a","Type":"ContainerStarted","Data":"7f4bd9114e31262c993ff7ed6b6e638dcf02115012722e5da8d90177ad280791"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.144967 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" event={"ID":"739fb53a-c353-4113-9f21-062b6580a184","Type":"ContainerStarted","Data":"e6959c9b0e612b767afbb03e1d795075496bf62768e586a5174ad8286561587e"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.147300 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.150493 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dn92s" podStartSLOduration=177.150474697 podStartE2EDuration="2m57.150474697s" 
podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.150118007 +0000 UTC m=+224.544519617" watchObservedRunningTime="2026-02-27 08:32:06.150474697 +0000 UTC m=+224.544876317" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.162389 4906 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pqnpm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.162492 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" podUID="739fb53a-c353-4113-9f21-062b6580a184" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.174857 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" event={"ID":"9c399ec5-479b-4e68-b556-691d6b9dd26f","Type":"ContainerStarted","Data":"98286259fb3975e5232d011a58c5f2e9abaffc6b949ee206a269a484315b435e"} Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.180056 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.180138 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.181691 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.182937 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50304: no serving certificate available for the kubelet" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.219549 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.221835 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.721803481 +0000 UTC m=+225.116205081 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.222606 4906 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-xl55p container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" start-of-body= Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.222677 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" podUID="543292c6-d79b-4198-932b-fdc68dcbbde5" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.10:8443/healthz\": dial tcp 10.217.0.10:8443: connect: connection refused" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.247695 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qvd5m"] Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.248081 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerName="controller-manager" containerID="cri-o://98cfc4860b8cf1fb3a8d786f9622628bd2afee5c5a331adcaebbd8356e896bec" gracePeriod=30 Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.291362 4906 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-qvd5m container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.18:8443/healthz\": EOF" start-of-body= Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.291428 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": EOF" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.309457 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" podStartSLOduration=177.30943969 podStartE2EDuration="2m57.30943969s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.268368249 +0000 UTC m=+224.662769859" watchObservedRunningTime="2026-02-27 08:32:06.30943969 +0000 UTC m=+224.703841300" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.321849 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg"] Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.322065 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.322146 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" podUID="d8e9a89a-0411-4456-9637-08712b004662" containerName="route-controller-manager" containerID="cri-o://d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb" gracePeriod=30 Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.322573 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.822557284 +0000 UTC m=+225.216958894 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.373851 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" podStartSLOduration=177.373830906 podStartE2EDuration="2m57.373830906s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.336138152 +0000 UTC m=+224.730539772" watchObservedRunningTime="2026-02-27 08:32:06.373830906 +0000 UTC m=+224.768232506" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.412134 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-b6rvf" podStartSLOduration=177.412086137 podStartE2EDuration="2m57.412086137s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.376270726 +0000 UTC m=+224.770672346" watchObservedRunningTime="2026-02-27 08:32:06.412086137 +0000 UTC m=+224.806487747" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.423717 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.424254 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:06.924218653 +0000 UTC m=+225.318620443 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.443616 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-62r9p" podStartSLOduration=177.443585935 podStartE2EDuration="2m57.443585935s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.411992205 +0000 UTC m=+224.806393845" watchObservedRunningTime="2026-02-27 08:32:06.443585935 +0000 UTC m=+224.837987545" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.444534 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" podStartSLOduration=177.444525312 podStartE2EDuration="2m57.444525312s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.440559569 +0000 UTC m=+224.834961179" watchObservedRunningTime="2026-02-27 08:32:06.444525312 +0000 UTC m=+224.838926922" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.481135 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-q826k" podStartSLOduration=177.481113166 podStartE2EDuration="2m57.481113166s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:06.477368129 +0000 UTC m=+224.871769749" watchObservedRunningTime="2026-02-27 08:32:06.481113166 +0000 UTC m=+224.875514776" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.530095 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.530653 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.030637558 +0000 UTC m=+225.425039168 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.603257 4906 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-b7qtg container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": read tcp 10.217.0.2:55212->10.217.0.6:8443: read: connection reset by peer" start-of-body= Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.603345 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" podUID="d8e9a89a-0411-4456-9637-08712b004662" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": read tcp 10.217.0.2:55212->10.217.0.6:8443: read: connection reset by peer" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.632601 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.632771 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.132740819 +0000 UTC m=+225.527142429 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.633354 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.633923 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.133909093 +0000 UTC m=+225.528310703 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.734639 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.734862 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.23482618 +0000 UTC m=+225.629227790 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.735239 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.735620 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.235611153 +0000 UTC m=+225.630012763 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.836639 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.836804 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.336773017 +0000 UTC m=+225.731174627 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.837437 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.837861 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.337843858 +0000 UTC m=+225.732245468 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.903385 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-4655n" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.918065 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50320: no serving certificate available for the kubelet" Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.944167 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:06 crc kubenswrapper[4906]: E0227 08:32:06.944729 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.444710285 +0000 UTC m=+225.839111895 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.958713 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:06 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:06 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:06 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:06 crc kubenswrapper[4906]: I0227 08:32:06.958778 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.045641 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.046059 4906 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.546030064 +0000 UTC m=+225.940431664 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.095208 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-6576b87f9c-b7qtg_d8e9a89a-0411-4456-9637-08712b004662/route-controller-manager/0.log" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.095302 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.146906 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-client-ca\") pod \"d8e9a89a-0411-4456-9637-08712b004662\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.148219 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-client-ca" (OuterVolumeSpecName: "client-ca") pod "d8e9a89a-0411-4456-9637-08712b004662" (UID: "d8e9a89a-0411-4456-9637-08712b004662"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.148696 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.148756 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8e9a89a-0411-4456-9637-08712b004662-serving-cert\") pod \"d8e9a89a-0411-4456-9637-08712b004662\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.148788 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntr47\" (UniqueName: \"kubernetes.io/projected/d8e9a89a-0411-4456-9637-08712b004662-kube-api-access-ntr47\") pod \"d8e9a89a-0411-4456-9637-08712b004662\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.148845 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config\") pod \"d8e9a89a-0411-4456-9637-08712b004662\" (UID: \"d8e9a89a-0411-4456-9637-08712b004662\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.150954 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config" (OuterVolumeSpecName: "config") pod "d8e9a89a-0411-4456-9637-08712b004662" (UID: "d8e9a89a-0411-4456-9637-08712b004662"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.152124 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.652089238 +0000 UTC m=+226.046490848 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.152237 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.152388 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.152664 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d8e9a89a-0411-4456-9637-08712b004662-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.156985 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.656961337 +0000 UTC m=+226.051362947 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.163420 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8e9a89a-0411-4456-9637-08712b004662-kube-api-access-ntr47" (OuterVolumeSpecName: "kube-api-access-ntr47") pod "d8e9a89a-0411-4456-9637-08712b004662" (UID: "d8e9a89a-0411-4456-9637-08712b004662"). InnerVolumeSpecName "kube-api-access-ntr47". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.173829 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8e9a89a-0411-4456-9637-08712b004662-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d8e9a89a-0411-4456-9637-08712b004662" (UID: "d8e9a89a-0411-4456-9637-08712b004662"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.202039 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-6rvgh" event={"ID":"9bb2ded7-f8fe-4978-81cd-08cafe0fe124","Type":"ContainerStarted","Data":"be31a394d55d037ab0b89076d1225640ed9936314588212fee683f8f228da74b"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.225568 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-6rvgh" podStartSLOduration=178.225550383 podStartE2EDuration="2m58.225550383s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:07.223247887 +0000 UTC m=+225.617649497" watchObservedRunningTime="2026-02-27 08:32:07.225550383 +0000 UTC m=+225.619951993" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.228389 4906 generic.go:334] "Generic (PLEG): container finished" podID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerID="98cfc4860b8cf1fb3a8d786f9622628bd2afee5c5a331adcaebbd8356e896bec" exitCode=0 Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.228484 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" event={"ID":"390464d8-fc1d-443a-85f7-7164ac4e2d05","Type":"ContainerDied","Data":"98cfc4860b8cf1fb3a8d786f9622628bd2afee5c5a331adcaebbd8356e896bec"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.247934 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" event={"ID":"1f4e9433-9a0f-4432-bfbe-b98fe89961bd","Type":"ContainerStarted","Data":"46b56fe8b2b1149303bd71038b219c7bb95e423f86206e9728f9006311c0d9b3"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.254539 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.254691 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.754664833 +0000 UTC m=+226.149066453 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.254736 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-6576b87f9c-b7qtg_d8e9a89a-0411-4456-9637-08712b004662/route-controller-manager/0.log" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.254777 4906 generic.go:334] "Generic (PLEG): container finished" podID="d8e9a89a-0411-4456-9637-08712b004662" containerID="d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb" exitCode=255 Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.254855 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.254901 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.254845 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" event={"ID":"d8e9a89a-0411-4456-9637-08712b004662","Type":"ContainerDied","Data":"d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.255017 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg" event={"ID":"d8e9a89a-0411-4456-9637-08712b004662","Type":"ContainerDied","Data":"70be84e3d1d6ae7f1e57ee105fc054ab5aea084765f1b6ae067b22cfac2b18dd"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.255049 4906 scope.go:117] "RemoveContainer" containerID="d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.255264 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.75525279 +0000 UTC m=+226.149654400 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.254965 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8e9a89a-0411-4456-9637-08712b004662-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.255331 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntr47\" (UniqueName: \"kubernetes.io/projected/d8e9a89a-0411-4456-9637-08712b004662-kube-api-access-ntr47\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.261717 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" event={"ID":"74d50313-482b-4d26-acd9-ce88a2c53093","Type":"ContainerStarted","Data":"5d1852dfdb6545d4d38d9d020751d51a79c6074c64d5cc5a2a5ffd1b93f99ed6"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.280438 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" event={"ID":"fd02452f-1a87-43a5-8f32-3b63d9f522e3","Type":"ContainerStarted","Data":"e3f07b194ccce65cfa5d39dee14e670e7af1e69adce148e168e3e92e6cc7d153"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.289635 4906 scope.go:117] "RemoveContainer" containerID="d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.295009 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ppcv6" podStartSLOduration=178.294982723 podStartE2EDuration="2m58.294982723s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:07.28647615 +0000 UTC m=+225.680877760" watchObservedRunningTime="2026-02-27 08:32:07.294982723 +0000 UTC m=+225.689384333" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.296121 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb\": container with ID starting with d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb not found: ID does not exist" containerID="d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.296165 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb"} err="failed to get container status \"d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb\": rpc error: code = NotFound desc = could not find container \"d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb\": container with ID starting with d036ddf731d0a170d7f275a214d23aede3f67832f0fdfc55e52ffd2294102ddb not found: ID does not exist" Feb 27 08:32:07 crc 
kubenswrapper[4906]: I0227 08:32:07.308888 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-hhgws" event={"ID":"ce4d5219-eab9-4fda-bf96-5a7ef9056300","Type":"ContainerStarted","Data":"6048fe30f6cb1ce1515448a9b4a84fc7345cf29f121b1c4d9cacf265a04b91dd"} Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.310758 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.310808 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.311334 4906 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pqnpm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.311412 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" podUID="739fb53a-c353-4113-9f21-062b6580a184" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.311505 4906 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-m8zqh container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.311521 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" podUID="20abae03-4ab8-4e0e-91f2-f4ec1bfc9529" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.312350 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.324308 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" podStartSLOduration=179.324269618 podStartE2EDuration="2m59.324269618s" podCreationTimestamp="2026-02-27 08:29:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:07.322761115 +0000 UTC m=+225.717162755" watchObservedRunningTime="2026-02-27 08:32:07.324269618 +0000 UTC m=+225.718671228" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.346395 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg"] Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.350112 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-b7qtg"] Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.355846 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-config\") pod \"390464d8-fc1d-443a-85f7-7164ac4e2d05\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.355957 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-client-ca\") pod \"390464d8-fc1d-443a-85f7-7164ac4e2d05\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.356006 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4px78\" (UniqueName: \"kubernetes.io/projected/390464d8-fc1d-443a-85f7-7164ac4e2d05-kube-api-access-4px78\") pod \"390464d8-fc1d-443a-85f7-7164ac4e2d05\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.356068 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/390464d8-fc1d-443a-85f7-7164ac4e2d05-serving-cert\") pod \"390464d8-fc1d-443a-85f7-7164ac4e2d05\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.356167 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.356236 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-proxy-ca-bundles\") pod \"390464d8-fc1d-443a-85f7-7164ac4e2d05\" (UID: \"390464d8-fc1d-443a-85f7-7164ac4e2d05\") " Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.358573 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-config" (OuterVolumeSpecName: "config") pod "390464d8-fc1d-443a-85f7-7164ac4e2d05" (UID: 
"390464d8-fc1d-443a-85f7-7164ac4e2d05"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.359144 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-client-ca" (OuterVolumeSpecName: "client-ca") pod "390464d8-fc1d-443a-85f7-7164ac4e2d05" (UID: "390464d8-fc1d-443a-85f7-7164ac4e2d05"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.362831 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/390464d8-fc1d-443a-85f7-7164ac4e2d05-kube-api-access-4px78" (OuterVolumeSpecName: "kube-api-access-4px78") pod "390464d8-fc1d-443a-85f7-7164ac4e2d05" (UID: "390464d8-fc1d-443a-85f7-7164ac4e2d05"). InnerVolumeSpecName "kube-api-access-4px78". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.368345 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "390464d8-fc1d-443a-85f7-7164ac4e2d05" (UID: "390464d8-fc1d-443a-85f7-7164ac4e2d05"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.368475 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.868454498 +0000 UTC m=+226.262856108 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.371527 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/390464d8-fc1d-443a-85f7-7164ac4e2d05-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "390464d8-fc1d-443a-85f7-7164ac4e2d05" (UID: "390464d8-fc1d-443a-85f7-7164ac4e2d05"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.421532 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-9jcn2" podStartSLOduration=178.421504831 podStartE2EDuration="2m58.421504831s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:07.416320663 +0000 UTC m=+225.810722283" watchObservedRunningTime="2026-02-27 08:32:07.421504831 +0000 UTC m=+225.815906451" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.444405 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-hhgws" podStartSLOduration=9.444383113 podStartE2EDuration="9.444383113s" podCreationTimestamp="2026-02-27 08:31:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:07.441912943 +0000 UTC m=+225.836314553" watchObservedRunningTime="2026-02-27 08:32:07.444383113 +0000 UTC m=+225.838784723" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.459238 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.459372 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.459390 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4px78\" (UniqueName: \"kubernetes.io/projected/390464d8-fc1d-443a-85f7-7164ac4e2d05-kube-api-access-4px78\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.459405 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/390464d8-fc1d-443a-85f7-7164ac4e2d05-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.459416 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.459427 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/390464d8-fc1d-443a-85f7-7164ac4e2d05-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.459784 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:07.959765532 +0000 UTC m=+226.354167142 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.561156 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.561407 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.061368159 +0000 UTC m=+226.455769769 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.562722 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.563249 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.063225602 +0000 UTC m=+226.457627212 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.664400 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.664609 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.164573991 +0000 UTC m=+226.558975601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.664847 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.665181 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.165173198 +0000 UTC m=+226.559574808 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.765760 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.766028 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.265990673 +0000 UTC m=+226.660392283 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.766095 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.766524 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.266512638 +0000 UTC m=+226.660914258 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.867453 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.867676 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.367635421 +0000 UTC m=+226.762037031 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.867802 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.868260 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.368249679 +0000 UTC m=+226.762651289 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.938736 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:07 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:07 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:07 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.938801 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.969126 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.969471 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.469420784 +0000 UTC m=+226.863822394 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:07 crc kubenswrapper[4906]: I0227 08:32:07.969616 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:07 crc kubenswrapper[4906]: E0227 08:32:07.970119 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.470097523 +0000 UTC m=+226.864499123 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.032948 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-xl55p" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.071477 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.071660 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.571626828 +0000 UTC m=+226.966028438 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.071779 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.072208 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.572200264 +0000 UTC m=+226.966601864 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.173852 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.174347 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.674321636 +0000 UTC m=+227.068723246 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.275637 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.276064 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.776047697 +0000 UTC m=+227.170449307 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.289221 4906 ???:1] "http: TLS handshake error from 192.168.126.11:50328: no serving certificate available for the kubelet" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.325491 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" event={"ID":"390464d8-fc1d-443a-85f7-7164ac4e2d05","Type":"ContainerDied","Data":"fc2dfcd733248be924a807b6e66df1a8b04691bab48b4dab5e2d284d20c14abf"} Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.325556 4906 scope.go:117] "RemoveContainer" containerID="98cfc4860b8cf1fb3a8d786f9622628bd2afee5c5a331adcaebbd8356e896bec" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.325607 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-qvd5m" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.333066 4906 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-pqnpm container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.333122 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" podUID="739fb53a-c353-4113-9f21-062b6580a184" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.30:8080/healthz\": dial tcp 10.217.0.30:8080: connect: connection refused" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.334607 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.338515 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-m8zqh" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.375203 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qvd5m"] Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.376828 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.378397 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.878239071 +0000 UTC m=+227.272640691 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.384459 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-qvd5m"] Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.478939 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.479647 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:08.979616592 +0000 UTC m=+227.374018202 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.562109 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" path="/var/lib/kubelet/pods/390464d8-fc1d-443a-85f7-7164ac4e2d05/volumes" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.563754 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8e9a89a-0411-4456-9637-08712b004662" path="/var/lib/kubelet/pods/d8e9a89a-0411-4456-9637-08712b004662/volumes" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.580418 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.580618 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.080586661 +0000 UTC m=+227.474988271 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.580743 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.581161 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.081147537 +0000 UTC m=+227.475549137 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.606500 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-696ff5488b-54ph8"] Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.606762 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8e9a89a-0411-4456-9637-08712b004662" containerName="route-controller-manager" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.606777 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8e9a89a-0411-4456-9637-08712b004662" containerName="route-controller-manager" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.606787 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerName="controller-manager" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.606794 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerName="controller-manager" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.606922 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="390464d8-fc1d-443a-85f7-7164ac4e2d05" containerName="controller-manager" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.606941 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8e9a89a-0411-4456-9637-08712b004662" containerName="route-controller-manager" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.607330 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.612471 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.613148 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.613319 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.613515 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.613674 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.613808 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.618755 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22"] Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.619736 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.623678 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-696ff5488b-54ph8"] Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.627327 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.627865 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.628081 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.628254 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.628387 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.628615 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.628775 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.652573 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22"] Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685344 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685575 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ksww\" (UniqueName: \"kubernetes.io/projected/b63290f0-e4e3-4bc7-a863-86e2498b84a9-kube-api-access-4ksww\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685611 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-proxy-ca-bundles\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685661 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-config\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685694 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-client-ca\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685733 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-config\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685756 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5136073-8bfc-4abe-a2ad-20531b058203-serving-cert\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685784 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-client-ca\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685816 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66gwr\" (UniqueName: 
\"kubernetes.io/projected/f5136073-8bfc-4abe-a2ad-20531b058203-kube-api-access-66gwr\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.685843 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b63290f0-e4e3-4bc7-a863-86e2498b84a9-serving-cert\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.686101 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.186053078 +0000 UTC m=+227.580454768 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787583 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-client-ca\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787677 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-config\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787709 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5136073-8bfc-4abe-a2ad-20531b058203-serving-cert\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787735 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-client-ca\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787772 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66gwr\" (UniqueName: \"kubernetes.io/projected/f5136073-8bfc-4abe-a2ad-20531b058203-kube-api-access-66gwr\") pod \"controller-manager-696ff5488b-54ph8\" (UID: 
\"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787800 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b63290f0-e4e3-4bc7-a863-86e2498b84a9-serving-cert\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787842 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ksww\" (UniqueName: \"kubernetes.io/projected/b63290f0-e4e3-4bc7-a863-86e2498b84a9-kube-api-access-4ksww\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787868 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-proxy-ca-bundles\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787941 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.787967 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-config\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.788827 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-client-ca\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.788846 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.288827739 +0000 UTC m=+227.683229349 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.788848 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-client-ca\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.789853 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-config\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.790271 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-config\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.790460 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-proxy-ca-bundles\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.797995 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5136073-8bfc-4abe-a2ad-20531b058203-serving-cert\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.811599 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66gwr\" (UniqueName: \"kubernetes.io/projected/f5136073-8bfc-4abe-a2ad-20531b058203-kube-api-access-66gwr\") pod \"controller-manager-696ff5488b-54ph8\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.888798 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.889076 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.389010826 +0000 UTC m=+227.783412436 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.889159 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.890816 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.390807837 +0000 UTC m=+227.785209447 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.897267 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b63290f0-e4e3-4bc7-a863-86e2498b84a9-serving-cert\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.925035 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.939381 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:08 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:08 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:08 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.939437 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.981600 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ksww\" (UniqueName: \"kubernetes.io/projected/b63290f0-e4e3-4bc7-a863-86e2498b84a9-kube-api-access-4ksww\") pod \"route-controller-manager-5b65f5bd49-4kv22\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:08 crc kubenswrapper[4906]: I0227 08:32:08.992239 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:08 crc kubenswrapper[4906]: E0227 08:32:08.992768 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.492741724 +0000 UTC m=+227.887143334 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.094447 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.095088 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.595058721 +0000 UTC m=+227.989460331 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.195976 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.196252 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.696214736 +0000 UTC m=+228.090616346 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.196340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.196816 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.696794262 +0000 UTC m=+228.091195872 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.257186 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.299057 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.299960 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.799919353 +0000 UTC m=+228.194320963 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.370772 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-696ff5488b-54ph8"] Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.401951 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.402491 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:09.902471307 +0000 UTC m=+228.296872917 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.503442 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.503709 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-02-27 08:32:10.003666533 +0000 UTC m=+228.398068143 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.504171 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.504560 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.004545778 +0000 UTC m=+228.398947388 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.542283 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22"] Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.606894 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.607137 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.107100032 +0000 UTC m=+228.501501642 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.607238 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.607609 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.107592136 +0000 UTC m=+228.501993746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.715385 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.715536 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.215508813 +0000 UTC m=+228.609910433 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.715727 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.716104 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.21609446 +0000 UTC m=+228.610496070 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.817023 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.817233 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.317194033 +0000 UTC m=+228.711595643 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.817441 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.817932 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.317922054 +0000 UTC m=+228.712323664 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.850270 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.850332 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.860930 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-hw96v"] Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.863158 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.865748 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.870253 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.881446 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hw96v"] Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.881817 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.881896 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.919296 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:09 crc kubenswrapper[4906]: E0227 08:32:09.920712 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.420684784 +0000 UTC m=+228.815086394 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.944661 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:09 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:09 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:09 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:09 crc kubenswrapper[4906]: I0227 08:32:09.944744 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.021184 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.021662 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.521639483 +0000 UTC m=+228.916041093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.021816 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-catalog-content\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.021870 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-utilities\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.021919 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4w5lx\" (UniqueName: \"kubernetes.io/projected/f9c97127-8fdd-40b2-8248-40df8c50e302-kube-api-access-4w5lx\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.059430 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-t5l4n"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.060491 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.064792 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.077125 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t5l4n"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.093780 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-dq899" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.128753 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.129104 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-utilities\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.129131 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4w5lx\" (UniqueName: \"kubernetes.io/projected/f9c97127-8fdd-40b2-8248-40df8c50e302-kube-api-access-4w5lx\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.129187 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-catalog-content\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.129275 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-utilities\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.129307 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-catalog-content\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.129338 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n7kz\" (UniqueName: \"kubernetes.io/projected/86ce64fc-356d-4172-b0c0-8074921dc727-kube-api-access-8n7kz\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.129483 4906 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.629463727 +0000 UTC m=+229.023865337 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.129983 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-utilities\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.131238 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-catalog-content\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.147605 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.148444 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.158536 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.158875 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.167116 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.176807 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4w5lx\" (UniqueName: \"kubernetes.io/projected/f9c97127-8fdd-40b2-8248-40df8c50e302-kube-api-access-4w5lx\") pod \"certified-operators-hw96v\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.191505 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.232233 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-utilities\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.232297 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n7kz\" (UniqueName: \"kubernetes.io/projected/86ce64fc-356d-4172-b0c0-8074921dc727-kube-api-access-8n7kz\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.232349 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/407da64d-d9bf-4d67-b741-37fa71780563-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.232406 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-catalog-content\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.232443 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.232510 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/407da64d-d9bf-4d67-b741-37fa71780563-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.233868 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.733850544 +0000 UTC m=+229.128252154 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.235592 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-utilities\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.235772 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-catalog-content\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.264500 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n7kz\" (UniqueName: \"kubernetes.io/projected/86ce64fc-356d-4172-b0c0-8074921dc727-kube-api-access-8n7kz\") pod \"community-operators-t5l4n\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.284407 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pfbgw"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.285916 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.308927 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pfbgw"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.334385 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.334845 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/407da64d-d9bf-4d67-b741-37fa71780563-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.335011 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/407da64d-d9bf-4d67-b741-37fa71780563-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.336280 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.836225513 +0000 UTC m=+229.230627123 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.336361 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/407da64d-d9bf-4d67-b741-37fa71780563-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.367185 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/407da64d-d9bf-4d67-b741-37fa71780563-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.383960 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" event={"ID":"f5136073-8bfc-4abe-a2ad-20531b058203","Type":"ContainerStarted","Data":"a80f96fc131a706fcb5b28cdeb2ec72ec0bb09f5067c29ace3b4face17766476"} Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.384058 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" event={"ID":"f5136073-8bfc-4abe-a2ad-20531b058203","Type":"ContainerStarted","Data":"de714af42e53c67535c497c5b3784e4a8ad13ed25e83a2afd7b5282d2d784ca3"} Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.384107 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.387784 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.397746 4906 generic.go:334] "Generic (PLEG): container finished" podID="ac646565-cd5f-405a-ad92-3f2afb51d5c8" containerID="518463c7d5f4d22b76c4a2479bc7d8eb22106a8f02cce6f686e3bdb0081c951a" exitCode=0 Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.397906 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" event={"ID":"ac646565-cd5f-405a-ad92-3f2afb51d5c8","Type":"ContainerDied","Data":"518463c7d5f4d22b76c4a2479bc7d8eb22106a8f02cce6f686e3bdb0081c951a"} Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.400771 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.405241 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.409743 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" event={"ID":"b63290f0-e4e3-4bc7-a863-86e2498b84a9","Type":"ContainerStarted","Data":"ee76aa46b4c8bc71a3738399d0eaf1df26b129197711c86d9b50990f6820c8d0"} Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.412055 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.412099 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" event={"ID":"b63290f0-e4e3-4bc7-a863-86e2498b84a9","Type":"ContainerStarted","Data":"4d7eb34770475b004b43352ced8d96f365a90f1713a848954e2b16df065501ca"} Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.413625 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.414645 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.416067 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.425466 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-gbqf7" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.425935 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" podStartSLOduration=4.42591251 podStartE2EDuration="4.42591251s" podCreationTimestamp="2026-02-27 08:32:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:10.412715584 +0000 UTC m=+228.807117194" watchObservedRunningTime="2026-02-27 08:32:10.42591251 +0000 UTC m=+228.820314120" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.426145 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.439241 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.439305 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-catalog-content\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.439338 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qblcj\" (UniqueName: \"kubernetes.io/projected/d81cc2f1-f6bc-454c-a927-973bf6bc452b-kube-api-access-qblcj\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.439400 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-utilities\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.443070 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" podStartSLOduration=4.443033909 podStartE2EDuration="4.443033909s" podCreationTimestamp="2026-02-27 08:32:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:10.440949949 +0000 UTC m=+228.835351559" watchObservedRunningTime="2026-02-27 08:32:10.443033909 +0000 UTC m=+228.837435519" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.446710 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:10.946642432 +0000 UTC m=+229.341044052 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.474916 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zn5pm"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.492318 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.505557 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.541904 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.542440 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcgrm\" (UniqueName: \"kubernetes.io/projected/2604542b-6cc4-44dc-ab74-f493ac742db9-kube-api-access-fcgrm\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.542523 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-utilities\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.542740 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-catalog-content\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.542796 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qblcj\" (UniqueName: \"kubernetes.io/projected/d81cc2f1-f6bc-454c-a927-973bf6bc452b-kube-api-access-qblcj\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.542875 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52d06262-85a7-4b1c-bd88-f1f16acafff5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.542975 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-utilities\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.543069 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-catalog-content\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.543120 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/52d06262-85a7-4b1c-bd88-f1f16acafff5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.543352 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.043322249 +0000 UTC m=+229.437723859 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.591668 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-utilities\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.594248 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-catalog-content\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.626620 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qblcj\" (UniqueName: \"kubernetes.io/projected/d81cc2f1-f6bc-454c-a927-973bf6bc452b-kube-api-access-qblcj\") pod \"certified-operators-pfbgw\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.630192 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.630226 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zn5pm"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.643696 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.643760 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52d06262-85a7-4b1c-bd88-f1f16acafff5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.643794 4906 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-utilities\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.645034 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52d06262-85a7-4b1c-bd88-f1f16acafff5-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.645095 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-catalog-content\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.645460 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-utilities\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.645538 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52d06262-85a7-4b1c-bd88-f1f16acafff5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.645826 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-catalog-content\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.645913 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.145873623 +0000 UTC m=+229.540275233 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.646018 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcgrm\" (UniqueName: \"kubernetes.io/projected/2604542b-6cc4-44dc-ab74-f493ac742db9-kube-api-access-fcgrm\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.706462 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52d06262-85a7-4b1c-bd88-f1f16acafff5-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.711777 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcgrm\" (UniqueName: \"kubernetes.io/projected/2604542b-6cc4-44dc-ab74-f493ac742db9-kube-api-access-fcgrm\") pod \"community-operators-zn5pm\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.752300 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.752973 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.252946566 +0000 UTC m=+229.647348186 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.760028 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.817752 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.817827 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.826980 4906 patch_prober.go:28] interesting pod/console-f9d7485db-xx8qm container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.827081 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xx8qm" podUID="a861f5dc-100c-443f-ab72-ecfe71895998" containerName="console" probeResult="failure" output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.855750 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.856907 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.356861239 +0000 UTC m=+229.751262949 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.877925 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.905321 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-hw96v"] Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.927248 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.937465 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.942947 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:10 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:10 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:10 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.943015 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.949614 4906 ???:1] "http: TLS handshake error from 192.168.126.11:48298: no serving certificate available for the kubelet" Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.963669 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.964786 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.464764446 +0000 UTC m=+229.859166056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:10 crc kubenswrapper[4906]: I0227 08:32:10.965262 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:10 crc kubenswrapper[4906]: E0227 08:32:10.965769 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.465761144 +0000 UTC m=+229.860162754 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.068730 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.069207 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.569185823 +0000 UTC m=+229.963587433 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.124383 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.124436 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.124490 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.124558 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.164331 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-t5l4n"] Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.171957 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.172379 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.672363436 +0000 UTC m=+230.066765036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.224414 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:32:11 crc kubenswrapper[4906]: W0227 08:32:11.267988 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86ce64fc_356d_4172_b0c0_8074921dc727.slice/crio-871eddc81a1199d87918b2bdf0f97755ff80fa04db1d8d6e51ad8f6d235cccd1 WatchSource:0}: Error finding container 871eddc81a1199d87918b2bdf0f97755ff80fa04db1d8d6e51ad8f6d235cccd1: Status 404 returned error can't find the container with id 871eddc81a1199d87918b2bdf0f97755ff80fa04db1d8d6e51ad8f6d235cccd1 Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.275384 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.276993 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.776965677 +0000 UTC m=+230.171367287 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.377342 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.378024 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.878004779 +0000 UTC m=+230.272406389 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.441589 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hw96v" event={"ID":"f9c97127-8fdd-40b2-8248-40df8c50e302","Type":"ContainerStarted","Data":"a43e5ba780a0193025a5b3f83638aecae99bd0bb5cbb7f70e7fb802f7a745af9"} Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.475949 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t5l4n" event={"ID":"86ce64fc-356d-4172-b0c0-8074921dc727","Type":"ContainerStarted","Data":"871eddc81a1199d87918b2bdf0f97755ff80fa04db1d8d6e51ad8f6d235cccd1"} Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.477985 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.478231 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:11.978211936 +0000 UTC m=+230.372613546 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.580294 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.580629 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.080614386 +0000 UTC m=+230.475015996 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.593292 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.682663 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.684720 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.184684554 +0000 UTC m=+230.579086354 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.712176 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.785584 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.786214 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.286194128 +0000 UTC m=+230.680595738 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.852712 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-knxlc"] Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.854384 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.856359 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.865036 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-knxlc"] Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.883121 4906 patch_prober.go:28] interesting pod/apiserver-76f77b778f-zl4pc container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]log ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]etcd ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/start-apiserver-admission-initializer ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/generic-apiserver-start-informers ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/max-in-flight-filter ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/storage-object-count-tracker-hook ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/image.openshift.io-apiserver-caches ok Feb 27 08:32:11 crc kubenswrapper[4906]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Feb 27 08:32:11 crc kubenswrapper[4906]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/project.openshift.io-projectcache ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/openshift.io-startinformers ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/openshift.io-restmapperupdater ok Feb 27 08:32:11 crc kubenswrapper[4906]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Feb 27 08:32:11 crc kubenswrapper[4906]: livez check failed Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.883979 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" podUID="fd02452f-1a87-43a5-8f32-3b63d9f522e3" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.892665 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.909224 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.409164825 +0000 UTC m=+230.803566435 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.910424 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zn5pm"] Feb 27 08:32:11 crc kubenswrapper[4906]: W0227 08:32:11.963501 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2604542b_6cc4_44dc_ab74_f493ac742db9.slice/crio-a919fd27d94ba36cac40777783c534cbbd4fb2b2eba5bf623c26d96c9bb84fbe WatchSource:0}: Error finding container a919fd27d94ba36cac40777783c534cbbd4fb2b2eba5bf623c26d96c9bb84fbe: Status 404 returned error can't find the container with id a919fd27d94ba36cac40777783c534cbbd4fb2b2eba5bf623c26d96c9bb84fbe Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.963526 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:11 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:11 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:11 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.963654 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.995002 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-utilities\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.995049 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-catalog-content\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.995084 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:11 crc kubenswrapper[4906]: I0227 08:32:11.995121 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tpxwg\" (UniqueName: \"kubernetes.io/projected/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-kube-api-access-tpxwg\") pod 
\"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:11 crc kubenswrapper[4906]: E0227 08:32:11.995526 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.495511497 +0000 UTC m=+230.889913107 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.097133 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.097356 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.59731675 +0000 UTC m=+230.991718360 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.097444 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-utilities\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.097502 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-catalog-content\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.097537 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.097587 4906 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-tpxwg\" (UniqueName: \"kubernetes.io/projected/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-kube-api-access-tpxwg\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.098129 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-catalog-content\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.098356 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.598331509 +0000 UTC m=+230.992733199 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.098381 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-utilities\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.142547 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.157819 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tpxwg\" (UniqueName: \"kubernetes.io/projected/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-kube-api-access-tpxwg\") pod \"redhat-marketplace-knxlc\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.191666 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pfbgw"] Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.199847 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.200037 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.699999048 +0000 UTC m=+231.094400668 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.200260 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.200673 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.700655896 +0000 UTC m=+231.095057506 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: W0227 08:32:12.245111 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd81cc2f1_f6bc_454c_a927_973bf6bc452b.slice/crio-caac9ad3fb20410454f1a45a6952b25c93cc8e8114930175bfadddc18b5c1050 WatchSource:0}: Error finding container caac9ad3fb20410454f1a45a6952b25c93cc8e8114930175bfadddc18b5c1050: Status 404 returned error can't find the container with id caac9ad3fb20410454f1a45a6952b25c93cc8e8114930175bfadddc18b5c1050 Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.273940 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.274457 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jxdrs"] Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.274767 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ac646565-cd5f-405a-ad92-3f2afb51d5c8" containerName="collect-profiles" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.274780 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="ac646565-cd5f-405a-ad92-3f2afb51d5c8" containerName="collect-profiles" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.274908 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="ac646565-cd5f-405a-ad92-3f2afb51d5c8" containerName="collect-profiles" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.275860 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.291186 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxdrs"] Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.303680 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sgwc\" (UniqueName: \"kubernetes.io/projected/ac646565-cd5f-405a-ad92-3f2afb51d5c8-kube-api-access-9sgwc\") pod \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.303760 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac646565-cd5f-405a-ad92-3f2afb51d5c8-secret-volume\") pod \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.303794 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac646565-cd5f-405a-ad92-3f2afb51d5c8-config-volume\") pod \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\" (UID: \"ac646565-cd5f-405a-ad92-3f2afb51d5c8\") " Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.303981 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.304330 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.804310162 +0000 UTC m=+231.198711772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.305972 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac646565-cd5f-405a-ad92-3f2afb51d5c8-config-volume" (OuterVolumeSpecName: "config-volume") pod "ac646565-cd5f-405a-ad92-3f2afb51d5c8" (UID: "ac646565-cd5f-405a-ad92-3f2afb51d5c8"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.315893 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac646565-cd5f-405a-ad92-3f2afb51d5c8-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ac646565-cd5f-405a-ad92-3f2afb51d5c8" (UID: "ac646565-cd5f-405a-ad92-3f2afb51d5c8"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.317808 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac646565-cd5f-405a-ad92-3f2afb51d5c8-kube-api-access-9sgwc" (OuterVolumeSpecName: "kube-api-access-9sgwc") pod "ac646565-cd5f-405a-ad92-3f2afb51d5c8" (UID: "ac646565-cd5f-405a-ad92-3f2afb51d5c8"). InnerVolumeSpecName "kube-api-access-9sgwc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.405423 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-utilities\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.405470 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-catalog-content\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.405554 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.405586 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkzf2\" (UniqueName: \"kubernetes.io/projected/b8b3f22b-652a-4703-bfdb-520d2f90867a-kube-api-access-wkzf2\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.405637 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sgwc\" (UniqueName: \"kubernetes.io/projected/ac646565-cd5f-405a-ad92-3f2afb51d5c8-kube-api-access-9sgwc\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.405648 4906 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ac646565-cd5f-405a-ad92-3f2afb51d5c8-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.405659 4906 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ac646565-cd5f-405a-ad92-3f2afb51d5c8-config-volume\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.406075 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:12.906058624 +0000 UTC m=+231.300460244 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.507786 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.508453 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-utilities\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.508491 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-catalog-content\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.508600 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkzf2\" (UniqueName: \"kubernetes.io/projected/b8b3f22b-652a-4703-bfdb-520d2f90867a-kube-api-access-wkzf2\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.509105 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.009059631 +0000 UTC m=+231.403461261 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.509406 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-utilities\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.510489 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-catalog-content\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.524261 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"407da64d-d9bf-4d67-b741-37fa71780563","Type":"ContainerStarted","Data":"3180747bb0d139a225ff441581ea395f61a9886fe61e9a45491543d7dbbc8d8b"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.524333 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"407da64d-d9bf-4d67-b741-37fa71780563","Type":"ContainerStarted","Data":"0e01521fb2266984b150c98976b82a129e61b2c84da4b98afed6252ff7962eda"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.566339 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkzf2\" (UniqueName: \"kubernetes.io/projected/b8b3f22b-652a-4703-bfdb-520d2f90867a-kube-api-access-wkzf2\") pod \"redhat-marketplace-jxdrs\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.566400 4906 generic.go:334] "Generic (PLEG): container finished" podID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerID="a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489" exitCode=0 Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.573353 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hw96v" event={"ID":"f9c97127-8fdd-40b2-8248-40df8c50e302","Type":"ContainerDied","Data":"a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.584378 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.584346027 podStartE2EDuration="2.584346027s" podCreationTimestamp="2026-02-27 08:32:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:12.582050782 +0000 UTC m=+230.976452392" watchObservedRunningTime="2026-02-27 08:32:12.584346027 +0000 UTC m=+230.978747637" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.594790 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-pfbgw" event={"ID":"d81cc2f1-f6bc-454c-a927-973bf6bc452b","Type":"ContainerStarted","Data":"caac9ad3fb20410454f1a45a6952b25c93cc8e8114930175bfadddc18b5c1050"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.609744 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.610292 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.110272037 +0000 UTC m=+231.504673647 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.614947 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.630322 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" event={"ID":"1f4e9433-9a0f-4432-bfbe-b98fe89961bd","Type":"ContainerStarted","Data":"0a4fd55f88ea88adeb4e05f71470986634db334129824d9d927049768617ecae"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.651988 4906 generic.go:334] "Generic (PLEG): container finished" podID="86ce64fc-356d-4172-b0c0-8074921dc727" containerID="c87b08fe2aeea170a41468a9766d1d6df7eaa5554e894312987a7211fa2d68e6" exitCode=0 Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.652116 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t5l4n" event={"ID":"86ce64fc-356d-4172-b0c0-8074921dc727","Type":"ContainerDied","Data":"c87b08fe2aeea170a41468a9766d1d6df7eaa5554e894312987a7211fa2d68e6"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.710593 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.712849 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.713737 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx" event={"ID":"ac646565-cd5f-405a-ad92-3f2afb51d5c8","Type":"ContainerDied","Data":"d18be7cb7825e3c3141d7415901e36856b2af5f1a2f6afb7a3037bee3e70fa7c"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.713772 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d18be7cb7825e3c3141d7415901e36856b2af5f1a2f6afb7a3037bee3e70fa7c" Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.713942 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.21383338 +0000 UTC m=+231.608235000 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.729016 4906 generic.go:334] "Generic (PLEG): container finished" podID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerID="02fb7827baa0e19728b6fc202d364fbaa766878959e49d0a47d13128d90702fb" exitCode=0 Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.729149 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zn5pm" event={"ID":"2604542b-6cc4-44dc-ab74-f493ac742db9","Type":"ContainerDied","Data":"02fb7827baa0e19728b6fc202d364fbaa766878959e49d0a47d13128d90702fb"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.729186 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zn5pm" event={"ID":"2604542b-6cc4-44dc-ab74-f493ac742db9","Type":"ContainerStarted","Data":"a919fd27d94ba36cac40777783c534cbbd4fb2b2eba5bf623c26d96c9bb84fbe"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.739794 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52d06262-85a7-4b1c-bd88-f1f16acafff5","Type":"ContainerStarted","Data":"1797e3a603085c49221525241be8a38fc748bc08a7725ef85a577f60a4efa5eb"} Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.821335 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.828483 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.328438878 +0000 UTC m=+231.722840488 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.922736 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:12 crc kubenswrapper[4906]: E0227 08:32:12.925652 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.425608138 +0000 UTC m=+231.820010028 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.947683 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:12 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:12 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:12 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:12 crc kubenswrapper[4906]: I0227 08:32:12.947802 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.027541 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.028103 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.52807436 +0000 UTC m=+231.922475970 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.053032 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rtknh"] Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.056064 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.063070 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.076899 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rtknh"] Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.105563 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-hhgws" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.117596 4906 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.128319 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.129170 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.628837503 +0000 UTC m=+232.023239113 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.167440 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-knxlc"] Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.172935 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxdrs"] Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.231181 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hwfk\" (UniqueName: \"kubernetes.io/projected/5deb1490-b634-484c-a7ea-56f3ee6cad31-kube-api-access-2hwfk\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.231722 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-utilities\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.231799 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.231922 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-catalog-content\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.233588 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.73356621 +0000 UTC m=+232.127967820 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: W0227 08:32:13.289351 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe1433ef_5f77_4598_bd41_4cd2da2a8bd7.slice/crio-be4cc70e989bfca66a059409ebd33b2db77fbd6bb5dab36d43e64832f9f53f31 WatchSource:0}: Error finding container be4cc70e989bfca66a059409ebd33b2db77fbd6bb5dab36d43e64832f9f53f31: Status 404 returned error can't find the container with id be4cc70e989bfca66a059409ebd33b2db77fbd6bb5dab36d43e64832f9f53f31 Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.333361 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.333658 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hwfk\" (UniqueName: \"kubernetes.io/projected/5deb1490-b634-484c-a7ea-56f3ee6cad31-kube-api-access-2hwfk\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.333721 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-utilities\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.333769 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-catalog-content\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.334326 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.834309072 +0000 UTC m=+232.228710682 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.391669 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-catalog-content\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.393230 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-utilities\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.427548 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hwfk\" (UniqueName: \"kubernetes.io/projected/5deb1490-b634-484c-a7ea-56f3ee6cad31-kube-api-access-2hwfk\") pod \"redhat-operators-rtknh\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.435075 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.435629 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:13.935611821 +0000 UTC m=+232.330013431 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.464565 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zb9z6"] Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.466180 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.501742 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zb9z6"] Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.536898 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.539251 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:14.039216715 +0000 UTC m=+232.433618325 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.573017 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.640953 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjt79\" (UniqueName: \"kubernetes.io/projected/e44e0cc5-2800-489c-9bd8-0f06f15adfca-kube-api-access-xjt79\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.641019 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-utilities\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.641091 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.641115 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-catalog-content\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.641475 4906 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:14.141460091 +0000 UTC m=+232.535861701 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.741687 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.741822 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:14.241798162 +0000 UTC m=+232.636199772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.742123 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.742158 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-catalog-content\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.742197 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjt79\" (UniqueName: \"kubernetes.io/projected/e44e0cc5-2800-489c-9bd8-0f06f15adfca-kube-api-access-xjt79\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.742225 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-utilities\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " 
pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.742667 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-utilities\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.742800 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:14.24278123 +0000 UTC m=+232.637182840 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.742811 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-catalog-content\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.759279 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" event={"ID":"1f4e9433-9a0f-4432-bfbe-b98fe89961bd","Type":"ContainerStarted","Data":"90fe2c69aa8fa02eff061a2202338a69efc9ebf9990fba098095422251cbe375"} Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.767911 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjt79\" (UniqueName: \"kubernetes.io/projected/e44e0cc5-2800-489c-9bd8-0f06f15adfca-kube-api-access-xjt79\") pod \"redhat-operators-zb9z6\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.768581 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52d06262-85a7-4b1c-bd88-f1f16acafff5","Type":"ContainerStarted","Data":"d959c0b45abb273a83a55ed249ee69f490e2393ed826401947d525e39c4a65c8"} Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.771839 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-knxlc" event={"ID":"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7","Type":"ContainerStarted","Data":"be4cc70e989bfca66a059409ebd33b2db77fbd6bb5dab36d43e64832f9f53f31"} Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.775567 4906 generic.go:334] "Generic (PLEG): container finished" podID="407da64d-d9bf-4d67-b741-37fa71780563" containerID="3180747bb0d139a225ff441581ea395f61a9886fe61e9a45491543d7dbbc8d8b" exitCode=0 Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.775650 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" 
event={"ID":"407da64d-d9bf-4d67-b741-37fa71780563","Type":"ContainerDied","Data":"3180747bb0d139a225ff441581ea395f61a9886fe61e9a45491543d7dbbc8d8b"} Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.778120 4906 generic.go:334] "Generic (PLEG): container finished" podID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerID="04355062341bb18ed31119d0177680a8ffa261ae3fef81af61630416bda9c593" exitCode=0 Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.778192 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfbgw" event={"ID":"d81cc2f1-f6bc-454c-a927-973bf6bc452b","Type":"ContainerDied","Data":"04355062341bb18ed31119d0177680a8ffa261ae3fef81af61630416bda9c593"} Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.780693 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxdrs" event={"ID":"b8b3f22b-652a-4703-bfdb-520d2f90867a","Type":"ContainerStarted","Data":"026330a52e0969e4a44f0f10323f8bdc196044cdae219be48ce84cd5f5a000fc"} Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.791951 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.844142 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.845022 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-27 08:32:14.344989875 +0000 UTC m=+232.739391475 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.941025 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:13 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:13 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:13 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.941103 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:13 crc kubenswrapper[4906]: I0227 08:32:13.946182 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:13 crc kubenswrapper[4906]: E0227 08:32:13.946693 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-27 08:32:14.446673504 +0000 UTC m=+232.841075124 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-km47t" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.003783 4906 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-27T08:32:13.117635894Z","Handler":null,"Name":""} Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.014733 4906 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.014947 4906 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.050183 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.058904 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.116895 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rtknh"] Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.149571 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zb9z6"] Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.161505 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.243034 4906 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.243086 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.280105 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-km47t\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.344096 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.564664 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.607285 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-km47t"] Feb 27 08:32:14 crc kubenswrapper[4906]: W0227 08:32:14.618759 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7a26f83_d59b_4375_bcb0_89b52426dae7.slice/crio-b3c4d00f92df2d4203d8930c6b443aa3fdc2b9ab062e6a8977f125ff9a142872 WatchSource:0}: Error finding container b3c4d00f92df2d4203d8930c6b443aa3fdc2b9ab062e6a8977f125ff9a142872: Status 404 returned error can't find the container with id b3c4d00f92df2d4203d8930c6b443aa3fdc2b9ab062e6a8977f125ff9a142872 Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.790166 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerStarted","Data":"d1c4a18f0eeb6e23969040685f2be6701dd1cb78268c06aff5db1c9fbd2bad00"} Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.793062 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" event={"ID":"a7a26f83-d59b-4375-bcb0-89b52426dae7","Type":"ContainerStarted","Data":"b3c4d00f92df2d4203d8930c6b443aa3fdc2b9ab062e6a8977f125ff9a142872"} Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.794625 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zb9z6" event={"ID":"e44e0cc5-2800-489c-9bd8-0f06f15adfca","Type":"ContainerStarted","Data":"3b41d1e48adc6e116fd4639711d06364678bce6079d671512848c3a9862b7be5"} Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.812194 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=4.812175573 podStartE2EDuration="4.812175573s" podCreationTimestamp="2026-02-27 08:32:10 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:14.810605678 +0000 UTC m=+233.205007288" watchObservedRunningTime="2026-02-27 08:32:14.812175573 +0000 UTC m=+233.206577183" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.886289 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.891573 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-zl4pc" Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.961118 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:14 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:14 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:14 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:14 crc kubenswrapper[4906]: I0227 08:32:14.961184 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.093487 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.282448 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/407da64d-d9bf-4d67-b741-37fa71780563-kube-api-access\") pod \"407da64d-d9bf-4d67-b741-37fa71780563\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.282523 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/407da64d-d9bf-4d67-b741-37fa71780563-kubelet-dir\") pod \"407da64d-d9bf-4d67-b741-37fa71780563\" (UID: \"407da64d-d9bf-4d67-b741-37fa71780563\") " Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.282689 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/407da64d-d9bf-4d67-b741-37fa71780563-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "407da64d-d9bf-4d67-b741-37fa71780563" (UID: "407da64d-d9bf-4d67-b741-37fa71780563"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.283068 4906 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/407da64d-d9bf-4d67-b741-37fa71780563-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.294198 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/407da64d-d9bf-4d67-b741-37fa71780563-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "407da64d-d9bf-4d67-b741-37fa71780563" (UID: "407da64d-d9bf-4d67-b741-37fa71780563"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.384371 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/407da64d-d9bf-4d67-b741-37fa71780563-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.803490 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"407da64d-d9bf-4d67-b741-37fa71780563","Type":"ContainerDied","Data":"0e01521fb2266984b150c98976b82a129e61b2c84da4b98afed6252ff7962eda"} Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.803539 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0e01521fb2266984b150c98976b82a129e61b2c84da4b98afed6252ff7962eda" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.803520 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.806264 4906 generic.go:334] "Generic (PLEG): container finished" podID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerID="3d1158c22bc587aecf1924be0ab2c18b1a34506b725f280816176a81ff536947" exitCode=0 Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.806339 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxdrs" event={"ID":"b8b3f22b-652a-4703-bfdb-520d2f90867a","Type":"ContainerDied","Data":"3d1158c22bc587aecf1924be0ab2c18b1a34506b725f280816176a81ff536947"} Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.809668 4906 generic.go:334] "Generic (PLEG): container finished" podID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerID="7042a6bc1e79ce29f0caa106466997b1b5d388b72e2b4561b207254ddac68a3b" exitCode=0 Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.809710 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-knxlc" event={"ID":"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7","Type":"ContainerDied","Data":"7042a6bc1e79ce29f0caa106466997b1b5d388b72e2b4561b207254ddac68a3b"} Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.939161 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:15 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:15 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:15 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:15 crc kubenswrapper[4906]: I0227 08:32:15.939285 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:16 crc kubenswrapper[4906]: I0227 08:32:16.130830 4906 ???:1] "http: TLS handshake error from 192.168.126.11:48310: no serving certificate available for the kubelet" Feb 27 08:32:16 crc kubenswrapper[4906]: I0227 08:32:16.817318 4906 generic.go:334] "Generic (PLEG): container finished" podID="52d06262-85a7-4b1c-bd88-f1f16acafff5" containerID="d959c0b45abb273a83a55ed249ee69f490e2393ed826401947d525e39c4a65c8" exitCode=0 Feb 27 08:32:16 crc kubenswrapper[4906]: I0227 08:32:16.817370 4906 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52d06262-85a7-4b1c-bd88-f1f16acafff5","Type":"ContainerDied","Data":"d959c0b45abb273a83a55ed249ee69f490e2393ed826401947d525e39c4a65c8"} Feb 27 08:32:16 crc kubenswrapper[4906]: I0227 08:32:16.936752 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:16 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:16 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:16 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:16 crc kubenswrapper[4906]: I0227 08:32:16.936818 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:17 crc kubenswrapper[4906]: I0227 08:32:17.007813 4906 ???:1] "http: TLS handshake error from 192.168.126.11:48312: no serving certificate available for the kubelet" Feb 27 08:32:17 crc kubenswrapper[4906]: I0227 08:32:17.824642 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerStarted","Data":"1c73eb8e76d2340cba03dbfd1309382ebc7cedb5d18e9b987d0a9c974c6cc599"} Feb 27 08:32:17 crc kubenswrapper[4906]: I0227 08:32:17.826688 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" event={"ID":"a7a26f83-d59b-4375-bcb0-89b52426dae7","Type":"ContainerStarted","Data":"b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948"} Feb 27 08:32:17 crc kubenswrapper[4906]: I0227 08:32:17.829665 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" event={"ID":"1f4e9433-9a0f-4432-bfbe-b98fe89961bd","Type":"ContainerStarted","Data":"4bed2f639768139c43a44342cee28bbe7dda77e98f002f688398c700b96c62ed"} Feb 27 08:32:17 crc kubenswrapper[4906]: I0227 08:32:17.937543 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:17 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:17 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:17 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:17 crc kubenswrapper[4906]: I0227 08:32:17.937666 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.077120 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.224540 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52d06262-85a7-4b1c-bd88-f1f16acafff5-kubelet-dir\") pod \"52d06262-85a7-4b1c-bd88-f1f16acafff5\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.225127 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52d06262-85a7-4b1c-bd88-f1f16acafff5-kube-api-access\") pod \"52d06262-85a7-4b1c-bd88-f1f16acafff5\" (UID: \"52d06262-85a7-4b1c-bd88-f1f16acafff5\") " Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.224846 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/52d06262-85a7-4b1c-bd88-f1f16acafff5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "52d06262-85a7-4b1c-bd88-f1f16acafff5" (UID: "52d06262-85a7-4b1c-bd88-f1f16acafff5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.231896 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52d06262-85a7-4b1c-bd88-f1f16acafff5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "52d06262-85a7-4b1c-bd88-f1f16acafff5" (UID: "52d06262-85a7-4b1c-bd88-f1f16acafff5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.326938 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/52d06262-85a7-4b1c-bd88-f1f16acafff5-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.327002 4906 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/52d06262-85a7-4b1c-bd88-f1f16acafff5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.841871 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"52d06262-85a7-4b1c-bd88-f1f16acafff5","Type":"ContainerDied","Data":"1797e3a603085c49221525241be8a38fc748bc08a7725ef85a577f60a4efa5eb"} Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.841994 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1797e3a603085c49221525241be8a38fc748bc08a7725ef85a577f60a4efa5eb" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.842203 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.844406 4906 generic.go:334] "Generic (PLEG): container finished" podID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerID="9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae" exitCode=0 Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.844492 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zb9z6" event={"ID":"e44e0cc5-2800-489c-9bd8-0f06f15adfca","Type":"ContainerDied","Data":"9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae"} Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.846960 4906 generic.go:334] "Generic (PLEG): container finished" podID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerID="1c73eb8e76d2340cba03dbfd1309382ebc7cedb5d18e9b987d0a9c974c6cc599" exitCode=0 Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.848081 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerDied","Data":"1c73eb8e76d2340cba03dbfd1309382ebc7cedb5d18e9b987d0a9c974c6cc599"} Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.939438 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:18 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:18 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:18 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:18 crc kubenswrapper[4906]: I0227 08:32:18.939552 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:19 crc kubenswrapper[4906]: I0227 08:32:19.860006 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:19 crc kubenswrapper[4906]: I0227 08:32:19.929330 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-rzl5p" podStartSLOduration=21.929308517 podStartE2EDuration="21.929308517s" podCreationTimestamp="2026-02-27 08:31:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:19.925696564 +0000 UTC m=+238.320098174" watchObservedRunningTime="2026-02-27 08:32:19.929308517 +0000 UTC m=+238.323710137" Feb 27 08:32:19 crc kubenswrapper[4906]: I0227 08:32:19.944303 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:19 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:19 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:19 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:19 crc kubenswrapper[4906]: I0227 08:32:19.944394 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" 
podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:19 crc kubenswrapper[4906]: I0227 08:32:19.958679 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" podStartSLOduration=190.958653314 podStartE2EDuration="3m10.958653314s" podCreationTimestamp="2026-02-27 08:29:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:32:19.957318336 +0000 UTC m=+238.351719956" watchObservedRunningTime="2026-02-27 08:32:19.958653314 +0000 UTC m=+238.353054944" Feb 27 08:32:20 crc kubenswrapper[4906]: I0227 08:32:20.798244 4906 patch_prober.go:28] interesting pod/console-f9d7485db-xx8qm container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body= Feb 27 08:32:20 crc kubenswrapper[4906]: I0227 08:32:20.798321 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xx8qm" podUID="a861f5dc-100c-443f-ab72-ecfe71895998" containerName="console" probeResult="failure" output="Get \"https://10.217.0.22:8443/health\": dial tcp 10.217.0.22:8443: connect: connection refused" Feb 27 08:32:20 crc kubenswrapper[4906]: I0227 08:32:20.937550 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:20 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:20 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:20 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:20 crc kubenswrapper[4906]: I0227 08:32:20.937698 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:21 crc kubenswrapper[4906]: I0227 08:32:21.125837 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:21 crc kubenswrapper[4906]: I0227 08:32:21.125926 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:21 crc kubenswrapper[4906]: I0227 08:32:21.126709 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:21 crc kubenswrapper[4906]: I0227 08:32:21.126836 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" 
output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:21 crc kubenswrapper[4906]: I0227 08:32:21.939269 4906 patch_prober.go:28] interesting pod/router-default-5444994796-bm27t container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 27 08:32:21 crc kubenswrapper[4906]: [-]has-synced failed: reason withheld Feb 27 08:32:21 crc kubenswrapper[4906]: [+]process-running ok Feb 27 08:32:21 crc kubenswrapper[4906]: healthz check failed Feb 27 08:32:21 crc kubenswrapper[4906]: I0227 08:32:21.939356 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-bm27t" podUID="db9e745a-c371-4b93-91ba-f755f3d4929d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 27 08:32:22 crc kubenswrapper[4906]: I0227 08:32:22.937831 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:22 crc kubenswrapper[4906]: I0227 08:32:22.940705 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-bm27t" Feb 27 08:32:24 crc kubenswrapper[4906]: I0227 08:32:24.844478 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:32:24 crc kubenswrapper[4906]: I0227 08:32:24.844543 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:32:25 crc kubenswrapper[4906]: I0227 08:32:25.777949 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-696ff5488b-54ph8"] Feb 27 08:32:25 crc kubenswrapper[4906]: I0227 08:32:25.778462 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" podUID="f5136073-8bfc-4abe-a2ad-20531b058203" containerName="controller-manager" containerID="cri-o://a80f96fc131a706fcb5b28cdeb2ec72ec0bb09f5067c29ace3b4face17766476" gracePeriod=30 Feb 27 08:32:25 crc kubenswrapper[4906]: I0227 08:32:25.807822 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22"] Feb 27 08:32:25 crc kubenswrapper[4906]: I0227 08:32:25.808134 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" podUID="b63290f0-e4e3-4bc7-a863-86e2498b84a9" containerName="route-controller-manager" containerID="cri-o://ee76aa46b4c8bc71a3738399d0eaf1df26b129197711c86d9b50990f6820c8d0" gracePeriod=30 Feb 27 08:32:26 crc kubenswrapper[4906]: I0227 08:32:26.408417 4906 ???:1] "http: TLS handshake error from 192.168.126.11:55984: no serving certificate available for the kubelet" Feb 27 08:32:26 crc kubenswrapper[4906]: I0227 08:32:26.907595 4906 generic.go:334] "Generic (PLEG): container finished" 
podID="f5136073-8bfc-4abe-a2ad-20531b058203" containerID="a80f96fc131a706fcb5b28cdeb2ec72ec0bb09f5067c29ace3b4face17766476" exitCode=0 Feb 27 08:32:26 crc kubenswrapper[4906]: I0227 08:32:26.908799 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" event={"ID":"f5136073-8bfc-4abe-a2ad-20531b058203","Type":"ContainerDied","Data":"a80f96fc131a706fcb5b28cdeb2ec72ec0bb09f5067c29ace3b4face17766476"} Feb 27 08:32:26 crc kubenswrapper[4906]: I0227 08:32:26.910466 4906 generic.go:334] "Generic (PLEG): container finished" podID="b63290f0-e4e3-4bc7-a863-86e2498b84a9" containerID="ee76aa46b4c8bc71a3738399d0eaf1df26b129197711c86d9b50990f6820c8d0" exitCode=0 Feb 27 08:32:26 crc kubenswrapper[4906]: I0227 08:32:26.910524 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" event={"ID":"b63290f0-e4e3-4bc7-a863-86e2498b84a9","Type":"ContainerDied","Data":"ee76aa46b4c8bc71a3738399d0eaf1df26b129197711c86d9b50990f6820c8d0"} Feb 27 08:32:28 crc kubenswrapper[4906]: I0227 08:32:28.926836 4906 patch_prober.go:28] interesting pod/controller-manager-696ff5488b-54ph8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.46:8443/healthz\": dial tcp 10.217.0.46:8443: connect: connection refused" start-of-body= Feb 27 08:32:28 crc kubenswrapper[4906]: I0227 08:32:28.927259 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" podUID="f5136073-8bfc-4abe-a2ad-20531b058203" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.46:8443/healthz\": dial tcp 10.217.0.46:8443: connect: connection refused" Feb 27 08:32:29 crc kubenswrapper[4906]: I0227 08:32:29.261125 4906 patch_prober.go:28] interesting pod/route-controller-manager-5b65f5bd49-4kv22 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.47:8443/healthz\": dial tcp 10.217.0.47:8443: connect: connection refused" start-of-body= Feb 27 08:32:29 crc kubenswrapper[4906]: I0227 08:32:29.261291 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" podUID="b63290f0-e4e3-4bc7-a863-86e2498b84a9" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.47:8443/healthz\": dial tcp 10.217.0.47:8443: connect: connection refused" Feb 27 08:32:30 crc kubenswrapper[4906]: I0227 08:32:30.804307 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:30 crc kubenswrapper[4906]: I0227 08:32:30.819558 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.124587 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.124672 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" 
containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.124710 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.124796 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.124741 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.125643 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.125698 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.126004 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"bbeba7f0c4895b9188071d9fede0c5b5988e09953b165a0ea4235f4fd24250d3"} pod="openshift-console/downloads-7954f5f757-hkv6l" containerMessage="Container download-server failed liveness probe, will be restarted" Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.126066 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" containerID="cri-o://bbeba7f0c4895b9188071d9fede0c5b5988e09953b165a0ea4235f4fd24250d3" gracePeriod=2 Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.949130 4906 generic.go:334] "Generic (PLEG): container finished" podID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerID="bbeba7f0c4895b9188071d9fede0c5b5988e09953b165a0ea4235f4fd24250d3" exitCode=0 Feb 27 08:32:31 crc kubenswrapper[4906]: I0227 08:32:31.949187 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hkv6l" event={"ID":"f3cc181a-c108-493a-87fa-9bf76f81b062","Type":"ContainerDied","Data":"bbeba7f0c4895b9188071d9fede0c5b5988e09953b165a0ea4235f4fd24250d3"} Feb 27 08:32:34 crc kubenswrapper[4906]: I0227 08:32:34.350028 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.360284 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.369597 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.406476 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-8557bd684c-w96ww"] Feb 27 08:32:37 crc kubenswrapper[4906]: E0227 08:32:37.406939 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52d06262-85a7-4b1c-bd88-f1f16acafff5" containerName="pruner" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.406960 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="52d06262-85a7-4b1c-bd88-f1f16acafff5" containerName="pruner" Feb 27 08:32:37 crc kubenswrapper[4906]: E0227 08:32:37.406974 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5136073-8bfc-4abe-a2ad-20531b058203" containerName="controller-manager" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.406982 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5136073-8bfc-4abe-a2ad-20531b058203" containerName="controller-manager" Feb 27 08:32:37 crc kubenswrapper[4906]: E0227 08:32:37.406994 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b63290f0-e4e3-4bc7-a863-86e2498b84a9" containerName="route-controller-manager" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.407001 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b63290f0-e4e3-4bc7-a863-86e2498b84a9" containerName="route-controller-manager" Feb 27 08:32:37 crc kubenswrapper[4906]: E0227 08:32:37.407024 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="407da64d-d9bf-4d67-b741-37fa71780563" containerName="pruner" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.407030 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="407da64d-d9bf-4d67-b741-37fa71780563" containerName="pruner" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.407149 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="b63290f0-e4e3-4bc7-a863-86e2498b84a9" containerName="route-controller-manager" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.407162 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="52d06262-85a7-4b1c-bd88-f1f16acafff5" containerName="pruner" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.407171 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5136073-8bfc-4abe-a2ad-20531b058203" containerName="controller-manager" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.407178 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="407da64d-d9bf-4d67-b741-37fa71780563" containerName="pruner" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.407740 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.409817 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-8557bd684c-w96ww"] Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.464153 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-proxy-ca-bundles\") pod \"f5136073-8bfc-4abe-a2ad-20531b058203\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.464373 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66gwr\" (UniqueName: \"kubernetes.io/projected/f5136073-8bfc-4abe-a2ad-20531b058203-kube-api-access-66gwr\") pod \"f5136073-8bfc-4abe-a2ad-20531b058203\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.464412 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-client-ca\") pod \"f5136073-8bfc-4abe-a2ad-20531b058203\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.464487 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-config\") pod \"f5136073-8bfc-4abe-a2ad-20531b058203\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.464523 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5136073-8bfc-4abe-a2ad-20531b058203-serving-cert\") pod \"f5136073-8bfc-4abe-a2ad-20531b058203\" (UID: \"f5136073-8bfc-4abe-a2ad-20531b058203\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.465554 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-client-ca" (OuterVolumeSpecName: "client-ca") pod "f5136073-8bfc-4abe-a2ad-20531b058203" (UID: "f5136073-8bfc-4abe-a2ad-20531b058203"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.465668 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f5136073-8bfc-4abe-a2ad-20531b058203" (UID: "f5136073-8bfc-4abe-a2ad-20531b058203"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.466181 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-config" (OuterVolumeSpecName: "config") pod "f5136073-8bfc-4abe-a2ad-20531b058203" (UID: "f5136073-8bfc-4abe-a2ad-20531b058203"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.473284 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5136073-8bfc-4abe-a2ad-20531b058203-kube-api-access-66gwr" (OuterVolumeSpecName: "kube-api-access-66gwr") pod "f5136073-8bfc-4abe-a2ad-20531b058203" (UID: "f5136073-8bfc-4abe-a2ad-20531b058203"). InnerVolumeSpecName "kube-api-access-66gwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.475218 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5136073-8bfc-4abe-a2ad-20531b058203-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f5136073-8bfc-4abe-a2ad-20531b058203" (UID: "f5136073-8bfc-4abe-a2ad-20531b058203"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566251 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b63290f0-e4e3-4bc7-a863-86e2498b84a9-serving-cert\") pod \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566371 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ksww\" (UniqueName: \"kubernetes.io/projected/b63290f0-e4e3-4bc7-a863-86e2498b84a9-kube-api-access-4ksww\") pod \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566438 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-config\") pod \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566519 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-client-ca\") pod \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\" (UID: \"b63290f0-e4e3-4bc7-a863-86e2498b84a9\") " Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566684 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-client-ca\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566724 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97263860-3ad0-4b78-9960-d8c7e0c3a806-serving-cert\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566752 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-proxy-ca-bundles\") pod \"controller-manager-8557bd684c-w96ww\" (UID: 
\"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566775 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hgjqp\" (UniqueName: \"kubernetes.io/projected/97263860-3ad0-4b78-9960-d8c7e0c3a806-kube-api-access-hgjqp\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.566915 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-config\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.567004 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66gwr\" (UniqueName: \"kubernetes.io/projected/f5136073-8bfc-4abe-a2ad-20531b058203-kube-api-access-66gwr\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.567178 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.567237 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.567254 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5136073-8bfc-4abe-a2ad-20531b058203-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.567317 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5136073-8bfc-4abe-a2ad-20531b058203-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.567514 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-client-ca" (OuterVolumeSpecName: "client-ca") pod "b63290f0-e4e3-4bc7-a863-86e2498b84a9" (UID: "b63290f0-e4e3-4bc7-a863-86e2498b84a9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.567548 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-config" (OuterVolumeSpecName: "config") pod "b63290f0-e4e3-4bc7-a863-86e2498b84a9" (UID: "b63290f0-e4e3-4bc7-a863-86e2498b84a9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.570100 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b63290f0-e4e3-4bc7-a863-86e2498b84a9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b63290f0-e4e3-4bc7-a863-86e2498b84a9" (UID: "b63290f0-e4e3-4bc7-a863-86e2498b84a9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.570130 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b63290f0-e4e3-4bc7-a863-86e2498b84a9-kube-api-access-4ksww" (OuterVolumeSpecName: "kube-api-access-4ksww") pod "b63290f0-e4e3-4bc7-a863-86e2498b84a9" (UID: "b63290f0-e4e3-4bc7-a863-86e2498b84a9"). InnerVolumeSpecName "kube-api-access-4ksww". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.668730 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-client-ca\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.668855 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97263860-3ad0-4b78-9960-d8c7e0c3a806-serving-cert\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.669013 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-proxy-ca-bundles\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.669043 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hgjqp\" (UniqueName: \"kubernetes.io/projected/97263860-3ad0-4b78-9960-d8c7e0c3a806-kube-api-access-hgjqp\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.669081 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-config\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.669169 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ksww\" (UniqueName: \"kubernetes.io/projected/b63290f0-e4e3-4bc7-a863-86e2498b84a9-kube-api-access-4ksww\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.669187 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.669199 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b63290f0-e4e3-4bc7-a863-86e2498b84a9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.669212 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b63290f0-e4e3-4bc7-a863-86e2498b84a9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.670176 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-client-ca\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.670534 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-proxy-ca-bundles\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.670802 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-config\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.674974 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97263860-3ad0-4b78-9960-d8c7e0c3a806-serving-cert\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.691198 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hgjqp\" (UniqueName: \"kubernetes.io/projected/97263860-3ad0-4b78-9960-d8c7e0c3a806-kube-api-access-hgjqp\") pod \"controller-manager-8557bd684c-w96ww\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.728143 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.989045 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.989056 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22" event={"ID":"b63290f0-e4e3-4bc7-a863-86e2498b84a9","Type":"ContainerDied","Data":"4d7eb34770475b004b43352ced8d96f365a90f1713a848954e2b16df065501ca"} Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.989184 4906 scope.go:117] "RemoveContainer" containerID="ee76aa46b4c8bc71a3738399d0eaf1df26b129197711c86d9b50990f6820c8d0" Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.992652 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" event={"ID":"f5136073-8bfc-4abe-a2ad-20531b058203","Type":"ContainerDied","Data":"de714af42e53c67535c497c5b3784e4a8ad13ed25e83a2afd7b5282d2d784ca3"} Feb 27 08:32:37 crc kubenswrapper[4906]: I0227 08:32:37.992690 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-696ff5488b-54ph8" Feb 27 08:32:38 crc kubenswrapper[4906]: I0227 08:32:38.025223 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22"] Feb 27 08:32:38 crc kubenswrapper[4906]: I0227 08:32:38.028194 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5b65f5bd49-4kv22"] Feb 27 08:32:38 crc kubenswrapper[4906]: I0227 08:32:38.038438 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-696ff5488b-54ph8"] Feb 27 08:32:38 crc kubenswrapper[4906]: I0227 08:32:38.055626 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-696ff5488b-54ph8"] Feb 27 08:32:38 crc kubenswrapper[4906]: I0227 08:32:38.562661 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b63290f0-e4e3-4bc7-a863-86e2498b84a9" path="/var/lib/kubelet/pods/b63290f0-e4e3-4bc7-a863-86e2498b84a9/volumes" Feb 27 08:32:38 crc kubenswrapper[4906]: I0227 08:32:38.563214 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5136073-8bfc-4abe-a2ad-20531b058203" path="/var/lib/kubelet/pods/f5136073-8bfc-4abe-a2ad-20531b058203/volumes" Feb 27 08:32:39 crc kubenswrapper[4906]: E0227 08:32:39.624685 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift4/ose-cli:latest" Feb 27 08:32:39 crc kubenswrapper[4906]: E0227 08:32:39.624922 4906 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 27 08:32:39 crc kubenswrapper[4906]: container &Container{Name:oc,Image:registry.redhat.io/openshift4/ose-cli:latest,Command:[/bin/bash -c oc get csr -o go-template='{{range .items}}{{if not .status}}{{.metadata.name}}{{"\n"}}{{end}}{{end}}' | xargs --no-run-if-empty oc adm certificate approve Feb 27 08:32:39 crc kubenswrapper[4906]: 
],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zhhwt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod auto-csr-approver-29536352-8fqhc_openshift-infra(582fc06a-0d1d-4260-a91f-af317ab278d9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled Feb 27 08:32:39 crc kubenswrapper[4906]: > logger="UnhandledError" Feb 27 08:32:39 crc kubenswrapper[4906]: E0227 08:32:39.626009 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.639163 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl"] Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.640066 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.644205 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.644251 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.644292 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.644319 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.644463 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.644522 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.647493 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl"] Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.800618 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k76dg\" (UniqueName: \"kubernetes.io/projected/8c816c66-6926-496d-ada8-c8ed2e6ec215-kube-api-access-k76dg\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: 
\"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.800720 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-config\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.800947 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c816c66-6926-496d-ada8-c8ed2e6ec215-serving-cert\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.801073 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-client-ca\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.902017 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k76dg\" (UniqueName: \"kubernetes.io/projected/8c816c66-6926-496d-ada8-c8ed2e6ec215-kube-api-access-k76dg\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.902114 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-config\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.902152 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c816c66-6926-496d-ada8-c8ed2e6ec215-serving-cert\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.902200 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-client-ca\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.903168 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-client-ca\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " 
pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.903406 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-config\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.918232 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c816c66-6926-496d-ada8-c8ed2e6ec215-serving-cert\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.920442 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k76dg\" (UniqueName: \"kubernetes.io/projected/8c816c66-6926-496d-ada8-c8ed2e6ec215-kube-api-access-k76dg\") pod \"route-controller-manager-765d89cbdc-zwwfl\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:39 crc kubenswrapper[4906]: I0227 08:32:39.967200 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:32:40 crc kubenswrapper[4906]: E0227 08:32:40.007028 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift4/ose-cli:latest\\\"\"" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" Feb 27 08:32:40 crc kubenswrapper[4906]: E0227 08:32:40.191320 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift4/ose-cli:latest" Feb 27 08:32:40 crc kubenswrapper[4906]: E0227 08:32:40.191465 4906 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 27 08:32:40 crc kubenswrapper[4906]: container &Container{Name:oc,Image:registry.redhat.io/openshift4/ose-cli:latest,Command:[/bin/bash -c oc get csr -o go-template='{{range .items}}{{if not .status}}{{.metadata.name}}{{"\n"}}{{end}}{{end}}' | xargs --no-run-if-empty oc adm certificate approve Feb 27 08:32:40 crc kubenswrapper[4906]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x9wgc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod auto-csr-approver-29536350-85r2j_openshift-infra(f743212c-ed63-408e-8063-ed04c8a7a1a9): ErrImagePull: rpc error: code = 
Canceled desc = copying system image from manifest list: copying config: context canceled Feb 27 08:32:40 crc kubenswrapper[4906]: > logger="UnhandledError" Feb 27 08:32:40 crc kubenswrapper[4906]: E0227 08:32:40.193458 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-infra/auto-csr-approver-29536350-85r2j" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" Feb 27 08:32:41 crc kubenswrapper[4906]: E0227 08:32:41.011707 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift4/ose-cli:latest\\\"\"" pod="openshift-infra/auto-csr-approver-29536350-85r2j" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" Feb 27 08:32:41 crc kubenswrapper[4906]: I0227 08:32:41.082134 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-mmsmb" Feb 27 08:32:41 crc kubenswrapper[4906]: I0227 08:32:41.132352 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:41 crc kubenswrapper[4906]: I0227 08:32:41.132920 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:42 crc kubenswrapper[4906]: I0227 08:32:42.739075 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 27 08:32:44 crc kubenswrapper[4906]: I0227 08:32:44.920839 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 27 08:32:44 crc kubenswrapper[4906]: I0227 08:32:44.922411 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:44 crc kubenswrapper[4906]: I0227 08:32:44.924657 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 27 08:32:44 crc kubenswrapper[4906]: I0227 08:32:44.925596 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 27 08:32:44 crc kubenswrapper[4906]: I0227 08:32:44.936102 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 27 08:32:44 crc kubenswrapper[4906]: I0227 08:32:44.991217 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:44 crc kubenswrapper[4906]: I0227 08:32:44.991313 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:45 crc kubenswrapper[4906]: I0227 08:32:45.092613 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:45 crc kubenswrapper[4906]: I0227 08:32:45.092714 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:45 crc kubenswrapper[4906]: I0227 08:32:45.093349 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:45 crc kubenswrapper[4906]: I0227 08:32:45.114672 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:45 crc kubenswrapper[4906]: I0227 08:32:45.246297 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:32:45 crc kubenswrapper[4906]: I0227 08:32:45.778602 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8557bd684c-w96ww"] Feb 27 08:32:45 crc kubenswrapper[4906]: I0227 08:32:45.871629 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl"] Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.532520 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.534461 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.539513 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.573319 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a71a873c-34c6-45e8-9923-816350816e82-kube-api-access\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.573409 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-var-lock\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.573473 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.674482 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a71a873c-34c6-45e8-9923-816350816e82-kube-api-access\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.674543 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-var-lock\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.674573 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-kubelet-dir\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.674664 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-kubelet-dir\") pod \"installer-9-crc\" (UID: 
\"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.674713 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-var-lock\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.697381 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a71a873c-34c6-45e8-9923-816350816e82-kube-api-access\") pod \"installer-9-crc\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:50 crc kubenswrapper[4906]: I0227 08:32:50.869000 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:32:51 crc kubenswrapper[4906]: I0227 08:32:51.125328 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:32:51 crc kubenswrapper[4906]: I0227 08:32:51.125415 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:32:54 crc kubenswrapper[4906]: I0227 08:32:54.844411 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:32:54 crc kubenswrapper[4906]: I0227 08:32:54.845008 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:32:57 crc kubenswrapper[4906]: E0227 08:32:57.965237 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1705303174/2\": happened during read: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 27 08:32:57 crc kubenswrapper[4906]: E0227 08:32:57.966159 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tpxwg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-knxlc_openshift-marketplace(fe1433ef-5f77-4598-bd41-4cd2da2a8bd7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1705303174/2\": happened during read: context canceled" logger="UnhandledError" Feb 27 08:32:57 crc kubenswrapper[4906]: E0227 08:32:57.967436 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage1705303174/2\\\": happened during read: context canceled\"" pod="openshift-marketplace/redhat-marketplace-knxlc" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" Feb 27 08:33:01 crc kubenswrapper[4906]: I0227 08:33:01.124778 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:05 crc kubenswrapper[4906]: I0227 08:33:01.124874 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:05 crc kubenswrapper[4906]: E0227 08:33:05.036088 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1542817037/2\": happened during read: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 27 08:33:05 crc kubenswrapper[4906]: E0227 08:33:05.036639 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs 
--catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xjt79,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-zb9z6_openshift-marketplace(e44e0cc5-2800-489c-9bd8-0f06f15adfca): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \"/var/tmp/container_images_storage1542817037/2\": happened during read: context canceled" logger="UnhandledError" Feb 27 08:33:05 crc kubenswrapper[4906]: E0227 08:33:05.037854 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: writing blob: storing blob to file \\\"/var/tmp/container_images_storage1542817037/2\\\": happened during read: context canceled\"" pod="openshift-marketplace/redhat-operators-zb9z6" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" Feb 27 08:33:05 crc kubenswrapper[4906]: E0227 08:33:05.362785 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 27 08:33:05 crc kubenswrapper[4906]: E0227 08:33:05.363557 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qblcj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-pfbgw_openshift-marketplace(d81cc2f1-f6bc-454c-a927-973bf6bc452b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:33:05 crc kubenswrapper[4906]: E0227 08:33:05.365066 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-pfbgw" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" Feb 27 08:33:07 crc kubenswrapper[4906]: I0227 08:33:07.397076 4906 ???:1] "http: TLS handshake error from 192.168.126.11:36944: no serving certificate available for the kubelet" Feb 27 08:33:11 crc kubenswrapper[4906]: I0227 08:33:11.124620 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:11 crc kubenswrapper[4906]: I0227 08:33:11.125135 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:12 crc kubenswrapper[4906]: E0227 08:33:12.412772 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-pfbgw" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" Feb 27 08:33:12 crc kubenswrapper[4906]: E0227 08:33:12.476109 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 27 08:33:12 
crc kubenswrapper[4906]: E0227 08:33:12.476348 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2hwfk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rtknh_openshift-marketplace(5deb1490-b634-484c-a7ea-56f3ee6cad31): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:33:12 crc kubenswrapper[4906]: E0227 08:33:12.477809 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rtknh" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" Feb 27 08:33:15 crc kubenswrapper[4906]: E0227 08:33:15.147332 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-rtknh" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" Feb 27 08:33:15 crc kubenswrapper[4906]: E0227 08:33:15.194668 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 27 08:33:15 crc kubenswrapper[4906]: E0227 08:33:15.194957 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkzf2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-jxdrs_openshift-marketplace(b8b3f22b-652a-4703-bfdb-520d2f90867a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:33:15 crc kubenswrapper[4906]: E0227 08:33:15.196866 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-jxdrs" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" Feb 27 08:33:19 crc kubenswrapper[4906]: E0227 08:33:19.700865 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 27 08:33:19 crc kubenswrapper[4906]: E0227 08:33:19.701728 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4w5lx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-hw96v_openshift-marketplace(f9c97127-8fdd-40b2-8248-40df8c50e302): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:33:19 crc kubenswrapper[4906]: E0227 08:33:19.703116 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-hw96v" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" Feb 27 08:33:21 crc kubenswrapper[4906]: I0227 08:33:21.124415 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:21 crc kubenswrapper[4906]: I0227 08:33:21.124513 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:24 crc kubenswrapper[4906]: I0227 08:33:24.844840 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:33:24 crc kubenswrapper[4906]: I0227 08:33:24.845395 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:33:24 crc kubenswrapper[4906]: I0227 08:33:24.845951 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" 
status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:33:24 crc kubenswrapper[4906]: I0227 08:33:24.846774 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:33:24 crc kubenswrapper[4906]: I0227 08:33:24.846837 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca" gracePeriod=600 Feb 27 08:33:24 crc kubenswrapper[4906]: E0227 08:33:24.965131 4906 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfc2f1b1e_37c4_45c1_8f9c_221faf5b777d.slice/crio-4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca.scope\": RecentStats: unable to find data in memory cache]" Feb 27 08:33:27 crc kubenswrapper[4906]: I0227 08:33:27.306344 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca" exitCode=0 Feb 27 08:33:27 crc kubenswrapper[4906]: I0227 08:33:27.306412 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca"} Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.757941 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-hw96v" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" Feb 27 08:33:27 crc kubenswrapper[4906]: I0227 08:33:27.769178 4906 scope.go:117] "RemoveContainer" containerID="a80f96fc131a706fcb5b28cdeb2ec72ec0bb09f5067c29ace3b4face17766476" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.782715 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.782922 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8n7kz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-t5l4n_openshift-marketplace(86ce64fc-356d-4172-b0c0-8074921dc727): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.784964 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-t5l4n" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.802937 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift4/ose-cli:latest" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.803079 4906 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 27 08:33:27 crc kubenswrapper[4906]: container &Container{Name:oc,Image:registry.redhat.io/openshift4/ose-cli:latest,Command:[/bin/bash -c oc get csr -o go-template='{{range .items}}{{if not .status}}{{.metadata.name}}{{"\n"}}{{end}}{{end}}' | xargs --no-run-if-empty oc adm certificate approve Feb 27 08:33:27 crc kubenswrapper[4906]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-x9wgc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod auto-csr-approver-29536350-85r2j_openshift-infra(f743212c-ed63-408e-8063-ed04c8a7a1a9): ErrImagePull: rpc 
error: code = Canceled desc = copying system image from manifest list: copying config: context canceled Feb 27 08:33:27 crc kubenswrapper[4906]: > logger="UnhandledError" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.803190 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift4/ose-cli:latest" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.803420 4906 kuberuntime_manager.go:1274] "Unhandled Error" err=< Feb 27 08:33:27 crc kubenswrapper[4906]: container &Container{Name:oc,Image:registry.redhat.io/openshift4/ose-cli:latest,Command:[/bin/bash -c oc get csr -o go-template='{{range .items}}{{if not .status}}{{.metadata.name}}{{"\n"}}{{end}}{{end}}' | xargs --no-run-if-empty oc adm certificate approve Feb 27 08:33:27 crc kubenswrapper[4906]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zhhwt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod auto-csr-approver-29536352-8fqhc_openshift-infra(582fc06a-0d1d-4260-a91f-af317ab278d9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled Feb 27 08:33:27 crc kubenswrapper[4906]: > logger="UnhandledError" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.804215 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-infra/auto-csr-approver-29536350-85r2j" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.810024 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.823396 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.823631 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tpxwg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-knxlc_openshift-marketplace(fe1433ef-5f77-4598-bd41-4cd2da2a8bd7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:33:27 crc kubenswrapper[4906]: E0227 08:33:27.825409 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-knxlc" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" Feb 27 08:33:28 crc kubenswrapper[4906]: I0227 08:33:28.075393 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl"] Feb 27 08:33:28 crc kubenswrapper[4906]: W0227 08:33:28.082049 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c816c66_6926_496d_ada8_c8ed2e6ec215.slice/crio-cf04aa4d4de2098f1e49cf587a6464c7f3dfee5a89a2c42cafe07b052245b3ce WatchSource:0}: Error finding container cf04aa4d4de2098f1e49cf587a6464c7f3dfee5a89a2c42cafe07b052245b3ce: Status 404 returned error can't find the container with id cf04aa4d4de2098f1e49cf587a6464c7f3dfee5a89a2c42cafe07b052245b3ce Feb 27 08:33:28 crc kubenswrapper[4906]: I0227 08:33:28.109142 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 27 08:33:28 crc kubenswrapper[4906]: I0227 08:33:28.315804 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d0f57e44-e247-4a99-afe1-59214fc0f1bd","Type":"ContainerStarted","Data":"d26c044416690bcfb05729c2144db28eb4156f036782338d5f0fafbeacb6d5ca"} Feb 27 08:33:28 crc kubenswrapper[4906]: I0227 08:33:28.318402 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" 
event={"ID":"8c816c66-6926-496d-ada8-c8ed2e6ec215","Type":"ContainerStarted","Data":"cf04aa4d4de2098f1e49cf587a6464c7f3dfee5a89a2c42cafe07b052245b3ce"} Feb 27 08:33:28 crc kubenswrapper[4906]: E0227 08:33:28.320646 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-t5l4n" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" Feb 27 08:33:28 crc kubenswrapper[4906]: I0227 08:33:28.370971 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 27 08:33:28 crc kubenswrapper[4906]: I0227 08:33:28.387576 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8557bd684c-w96ww"] Feb 27 08:33:28 crc kubenswrapper[4906]: W0227 08:33:28.397406 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97263860_3ad0_4b78_9960_d8c7e0c3a806.slice/crio-387ba42acc855e21f5119cf9c85a6254d9f33f471bda73ea89ecd4fe9c794e94 WatchSource:0}: Error finding container 387ba42acc855e21f5119cf9c85a6254d9f33f471bda73ea89ecd4fe9c794e94: Status 404 returned error can't find the container with id 387ba42acc855e21f5119cf9c85a6254d9f33f471bda73ea89ecd4fe9c794e94 Feb 27 08:33:29 crc kubenswrapper[4906]: I0227 08:33:29.326910 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" event={"ID":"97263860-3ad0-4b78-9960-d8c7e0c3a806","Type":"ContainerStarted","Data":"387ba42acc855e21f5119cf9c85a6254d9f33f471bda73ea89ecd4fe9c794e94"} Feb 27 08:33:29 crc kubenswrapper[4906]: I0227 08:33:29.327843 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a71a873c-34c6-45e8-9923-816350816e82","Type":"ContainerStarted","Data":"402f1bb26b8a0cd6203ab8f85f0b4cf603e6f08e4fb0bcd4a5e543af9be08f6e"} Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.125190 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.125742 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.344023 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a71a873c-34c6-45e8-9923-816350816e82","Type":"ContainerStarted","Data":"a6aed1a233358b4939ac9c257f7b983907b9f8b97cc04d3016f6e5444c3eda48"} Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.346298 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"432a2ec448d5afa76b89b67103131d632bfc1e942f1d1803d36030738d876711"} Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.347988 4906 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d0f57e44-e247-4a99-afe1-59214fc0f1bd","Type":"ContainerStarted","Data":"f67e2395f56ceeaa022415b5f95834c4324e8d5ff1c10488db2b39f5ca388a7b"} Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.349670 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" event={"ID":"8c816c66-6926-496d-ada8-c8ed2e6ec215","Type":"ContainerStarted","Data":"98ce6c996e71cf8e8d539b0937899e7c78540b309373ebaf7c91148ac09f7ef4"} Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.349914 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" podUID="8c816c66-6926-496d-ada8-c8ed2e6ec215" containerName="route-controller-manager" containerID="cri-o://98ce6c996e71cf8e8d539b0937899e7c78540b309373ebaf7c91148ac09f7ef4" gracePeriod=30 Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.352888 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hkv6l" event={"ID":"f3cc181a-c108-493a-87fa-9bf76f81b062","Type":"ContainerStarted","Data":"d62b0acb71fbd6d634e6a251f7ed79a8da849b20df82a8ed2fc3bc9632d1b2cd"} Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.354218 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" event={"ID":"97263860-3ad0-4b78-9960-d8c7e0c3a806","Type":"ContainerStarted","Data":"8bfc26267b21be7e94a931cf0a2235c49d1c82e38ecfdb41ea897c12762f26d6"} Feb 27 08:33:31 crc kubenswrapper[4906]: I0227 08:33:31.388071 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" podStartSLOduration=66.388042826 podStartE2EDuration="1m6.388042826s" podCreationTimestamp="2026-02-27 08:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:33:31.383438085 +0000 UTC m=+309.777839715" watchObservedRunningTime="2026-02-27 08:33:31.388042826 +0000 UTC m=+309.782444446" Feb 27 08:33:31 crc kubenswrapper[4906]: E0227 08:33:31.427043 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 27 08:33:31 crc kubenswrapper[4906]: E0227 08:33:31.427231 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fcgrm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-zn5pm_openshift-marketplace(2604542b-6cc4-44dc-ab74-f493ac742db9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:33:31 crc kubenswrapper[4906]: E0227 08:33:31.430505 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-zn5pm" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.364938 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-765d89cbdc-zwwfl_8c816c66-6926-496d-ada8-c8ed2e6ec215/route-controller-manager/0.log" Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.365944 4906 generic.go:334] "Generic (PLEG): container finished" podID="8c816c66-6926-496d-ada8-c8ed2e6ec215" containerID="98ce6c996e71cf8e8d539b0937899e7c78540b309373ebaf7c91148ac09f7ef4" exitCode=255 Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.366067 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" event={"ID":"8c816c66-6926-496d-ada8-c8ed2e6ec215","Type":"ContainerDied","Data":"98ce6c996e71cf8e8d539b0937899e7c78540b309373ebaf7c91148ac09f7ef4"} Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.370740 4906 generic.go:334] "Generic (PLEG): container finished" podID="d0f57e44-e247-4a99-afe1-59214fc0f1bd" containerID="f67e2395f56ceeaa022415b5f95834c4324e8d5ff1c10488db2b39f5ca388a7b" exitCode=0 Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.370850 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d0f57e44-e247-4a99-afe1-59214fc0f1bd","Type":"ContainerDied","Data":"f67e2395f56ceeaa022415b5f95834c4324e8d5ff1c10488db2b39f5ca388a7b"} Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.371104 4906 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" podUID="97263860-3ad0-4b78-9960-d8c7e0c3a806" containerName="controller-manager" containerID="cri-o://8bfc26267b21be7e94a931cf0a2235c49d1c82e38ecfdb41ea897c12762f26d6" gracePeriod=30 Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.371906 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.376242 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.376861 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.377243 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.381951 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.430498 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=42.430471554 podStartE2EDuration="42.430471554s" podCreationTimestamp="2026-02-27 08:32:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:33:32.429459515 +0000 UTC m=+310.823861145" watchObservedRunningTime="2026-02-27 08:33:32.430471554 +0000 UTC m=+310.824873164" Feb 27 08:33:32 crc kubenswrapper[4906]: I0227 08:33:32.451241 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" podStartSLOduration=67.451216553 podStartE2EDuration="1m7.451216553s" podCreationTimestamp="2026-02-27 08:32:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:33:32.449767411 +0000 UTC m=+310.844169031" watchObservedRunningTime="2026-02-27 08:33:32.451216553 +0000 UTC m=+310.845618173" Feb 27 08:33:33 crc kubenswrapper[4906]: E0227 08:33:33.247772 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-zn5pm" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.300276 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-765d89cbdc-zwwfl_8c816c66-6926-496d-ada8-c8ed2e6ec215/route-controller-manager/0.log" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.300355 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.332062 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb"] Feb 27 08:33:33 crc kubenswrapper[4906]: E0227 08:33:33.333214 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c816c66-6926-496d-ada8-c8ed2e6ec215" containerName="route-controller-manager" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.333246 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c816c66-6926-496d-ada8-c8ed2e6ec215" containerName="route-controller-manager" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.333439 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c816c66-6926-496d-ada8-c8ed2e6ec215" containerName="route-controller-manager" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.335655 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.342453 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb"] Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.366449 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-client-ca\") pod \"8c816c66-6926-496d-ada8-c8ed2e6ec215\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.366501 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-config\") pod \"8c816c66-6926-496d-ada8-c8ed2e6ec215\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.366661 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c816c66-6926-496d-ada8-c8ed2e6ec215-serving-cert\") pod \"8c816c66-6926-496d-ada8-c8ed2e6ec215\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.366762 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k76dg\" (UniqueName: \"kubernetes.io/projected/8c816c66-6926-496d-ada8-c8ed2e6ec215-kube-api-access-k76dg\") pod \"8c816c66-6926-496d-ada8-c8ed2e6ec215\" (UID: \"8c816c66-6926-496d-ada8-c8ed2e6ec215\") " Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.367024 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5w8gx\" (UniqueName: \"kubernetes.io/projected/fdba3d5b-a88a-48de-a3ab-7306545204f2-kube-api-access-5w8gx\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.367063 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-client-ca\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: 
\"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.367103 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-config\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.367273 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdba3d5b-a88a-48de-a3ab-7306545204f2-serving-cert\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.367435 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-client-ca" (OuterVolumeSpecName: "client-ca") pod "8c816c66-6926-496d-ada8-c8ed2e6ec215" (UID: "8c816c66-6926-496d-ada8-c8ed2e6ec215"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.367647 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-config" (OuterVolumeSpecName: "config") pod "8c816c66-6926-496d-ada8-c8ed2e6ec215" (UID: "8c816c66-6926-496d-ada8-c8ed2e6ec215"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.377027 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c816c66-6926-496d-ada8-c8ed2e6ec215-kube-api-access-k76dg" (OuterVolumeSpecName: "kube-api-access-k76dg") pod "8c816c66-6926-496d-ada8-c8ed2e6ec215" (UID: "8c816c66-6926-496d-ada8-c8ed2e6ec215"). InnerVolumeSpecName "kube-api-access-k76dg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.388127 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8c816c66-6926-496d-ada8-c8ed2e6ec215-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8c816c66-6926-496d-ada8-c8ed2e6ec215" (UID: "8c816c66-6926-496d-ada8-c8ed2e6ec215"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.397362 4906 generic.go:334] "Generic (PLEG): container finished" podID="97263860-3ad0-4b78-9960-d8c7e0c3a806" containerID="8bfc26267b21be7e94a931cf0a2235c49d1c82e38ecfdb41ea897c12762f26d6" exitCode=0 Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.397434 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" event={"ID":"97263860-3ad0-4b78-9960-d8c7e0c3a806","Type":"ContainerDied","Data":"8bfc26267b21be7e94a931cf0a2235c49d1c82e38ecfdb41ea897c12762f26d6"} Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.401520 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-765d89cbdc-zwwfl_8c816c66-6926-496d-ada8-c8ed2e6ec215/route-controller-manager/0.log" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.402741 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" event={"ID":"8c816c66-6926-496d-ada8-c8ed2e6ec215","Type":"ContainerDied","Data":"cf04aa4d4de2098f1e49cf587a6464c7f3dfee5a89a2c42cafe07b052245b3ce"} Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.402805 4906 scope.go:117] "RemoveContainer" containerID="98ce6c996e71cf8e8d539b0937899e7c78540b309373ebaf7c91148ac09f7ef4" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.402977 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.403253 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.403316 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.439302 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl"] Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.444505 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-765d89cbdc-zwwfl"] Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.468093 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-config\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.468549 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdba3d5b-a88a-48de-a3ab-7306545204f2-serving-cert\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " 
pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.468746 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5w8gx\" (UniqueName: \"kubernetes.io/projected/fdba3d5b-a88a-48de-a3ab-7306545204f2-kube-api-access-5w8gx\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.468902 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-client-ca\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.469072 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.469188 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c816c66-6926-496d-ada8-c8ed2e6ec215-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.469270 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c816c66-6926-496d-ada8-c8ed2e6ec215-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.469361 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k76dg\" (UniqueName: \"kubernetes.io/projected/8c816c66-6926-496d-ada8-c8ed2e6ec215-kube-api-access-k76dg\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.470312 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-client-ca\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.470283 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-config\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.475785 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdba3d5b-a88a-48de-a3ab-7306545204f2-serving-cert\") pod \"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.491801 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5w8gx\" (UniqueName: \"kubernetes.io/projected/fdba3d5b-a88a-48de-a3ab-7306545204f2-kube-api-access-5w8gx\") pod 
\"route-controller-manager-7dd7689756-vt2tb\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.662800 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.715184 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.773355 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kubelet-dir\") pod \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.773504 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kube-api-access\") pod \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\" (UID: \"d0f57e44-e247-4a99-afe1-59214fc0f1bd\") " Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.773503 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "d0f57e44-e247-4a99-afe1-59214fc0f1bd" (UID: "d0f57e44-e247-4a99-afe1-59214fc0f1bd"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.773983 4906 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.778394 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "d0f57e44-e247-4a99-afe1-59214fc0f1bd" (UID: "d0f57e44-e247-4a99-afe1-59214fc0f1bd"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:33:33 crc kubenswrapper[4906]: I0227 08:33:33.875340 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d0f57e44-e247-4a99-afe1-59214fc0f1bd-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.240027 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.290389 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-proxy-ca-bundles\") pod \"97263860-3ad0-4b78-9960-d8c7e0c3a806\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.290480 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97263860-3ad0-4b78-9960-d8c7e0c3a806-serving-cert\") pod \"97263860-3ad0-4b78-9960-d8c7e0c3a806\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.290528 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-config\") pod \"97263860-3ad0-4b78-9960-d8c7e0c3a806\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.290563 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hgjqp\" (UniqueName: \"kubernetes.io/projected/97263860-3ad0-4b78-9960-d8c7e0c3a806-kube-api-access-hgjqp\") pod \"97263860-3ad0-4b78-9960-d8c7e0c3a806\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.290602 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-client-ca\") pod \"97263860-3ad0-4b78-9960-d8c7e0c3a806\" (UID: \"97263860-3ad0-4b78-9960-d8c7e0c3a806\") " Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.291685 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-client-ca" (OuterVolumeSpecName: "client-ca") pod "97263860-3ad0-4b78-9960-d8c7e0c3a806" (UID: "97263860-3ad0-4b78-9960-d8c7e0c3a806"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.292159 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-config" (OuterVolumeSpecName: "config") pod "97263860-3ad0-4b78-9960-d8c7e0c3a806" (UID: "97263860-3ad0-4b78-9960-d8c7e0c3a806"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.298174 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/97263860-3ad0-4b78-9960-d8c7e0c3a806-kube-api-access-hgjqp" (OuterVolumeSpecName: "kube-api-access-hgjqp") pod "97263860-3ad0-4b78-9960-d8c7e0c3a806" (UID: "97263860-3ad0-4b78-9960-d8c7e0c3a806"). InnerVolumeSpecName "kube-api-access-hgjqp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.298868 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "97263860-3ad0-4b78-9960-d8c7e0c3a806" (UID: "97263860-3ad0-4b78-9960-d8c7e0c3a806"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.302469 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/97263860-3ad0-4b78-9960-d8c7e0c3a806-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "97263860-3ad0-4b78-9960-d8c7e0c3a806" (UID: "97263860-3ad0-4b78-9960-d8c7e0c3a806"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.395589 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.395646 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97263860-3ad0-4b78-9960-d8c7e0c3a806-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.395664 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.395681 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hgjqp\" (UniqueName: \"kubernetes.io/projected/97263860-3ad0-4b78-9960-d8c7e0c3a806-kube-api-access-hgjqp\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.395695 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/97263860-3ad0-4b78-9960-d8c7e0c3a806-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.414376 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" event={"ID":"97263860-3ad0-4b78-9960-d8c7e0c3a806","Type":"ContainerDied","Data":"387ba42acc855e21f5119cf9c85a6254d9f33f471bda73ea89ecd4fe9c794e94"} Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.414819 4906 scope.go:117] "RemoveContainer" containerID="8bfc26267b21be7e94a931cf0a2235c49d1c82e38ecfdb41ea897c12762f26d6" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.416366 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-8557bd684c-w96ww" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.419988 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"d0f57e44-e247-4a99-afe1-59214fc0f1bd","Type":"ContainerDied","Data":"d26c044416690bcfb05729c2144db28eb4156f036782338d5f0fafbeacb6d5ca"} Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.420039 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d26c044416690bcfb05729c2144db28eb4156f036782338d5f0fafbeacb6d5ca" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.420124 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.464424 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-8557bd684c-w96ww"] Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.464489 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-8557bd684c-w96ww"] Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.561699 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c816c66-6926-496d-ada8-c8ed2e6ec215" path="/var/lib/kubelet/pods/8c816c66-6926-496d-ada8-c8ed2e6ec215/volumes" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.562522 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="97263860-3ad0-4b78-9960-d8c7e0c3a806" path="/var/lib/kubelet/pods/97263860-3ad0-4b78-9960-d8c7e0c3a806/volumes" Feb 27 08:33:34 crc kubenswrapper[4906]: I0227 08:33:34.717917 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb"] Feb 27 08:33:34 crc kubenswrapper[4906]: W0227 08:33:34.720049 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfdba3d5b_a88a_48de_a3ab_7306545204f2.slice/crio-63d18ebd178f7ac04230ab83983cfbfe5328d18c4b1cb4259b8c9d3d7f536e01 WatchSource:0}: Error finding container 63d18ebd178f7ac04230ab83983cfbfe5328d18c4b1cb4259b8c9d3d7f536e01: Status 404 returned error can't find the container with id 63d18ebd178f7ac04230ab83983cfbfe5328d18c4b1cb4259b8c9d3d7f536e01 Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.427841 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zb9z6" event={"ID":"e44e0cc5-2800-489c-9bd8-0f06f15adfca","Type":"ContainerStarted","Data":"04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84"} Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.431377 4906 generic.go:334] "Generic (PLEG): container finished" podID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerID="b0c884e777223a0239f211a5f0b15d940fc6eba968f06c0c2be1be88ef7da39f" exitCode=0 Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.431526 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfbgw" event={"ID":"d81cc2f1-f6bc-454c-a927-973bf6bc452b","Type":"ContainerDied","Data":"b0c884e777223a0239f211a5f0b15d940fc6eba968f06c0c2be1be88ef7da39f"} Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.437643 4906 generic.go:334] "Generic (PLEG): container finished" podID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerID="2df010299977b7c2aa23c58bf59b7d640b305100e1f5e17c55901135c86719c2" exitCode=0 Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.437778 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxdrs" event={"ID":"b8b3f22b-652a-4703-bfdb-520d2f90867a","Type":"ContainerDied","Data":"2df010299977b7c2aa23c58bf59b7d640b305100e1f5e17c55901135c86719c2"} Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.449043 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerStarted","Data":"e9a566d1539ad42da990b4e2135ce2702c6c25f51d62ff1f6ef27dccfeea652a"} Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 
08:33:35.450839 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" event={"ID":"fdba3d5b-a88a-48de-a3ab-7306545204f2","Type":"ContainerStarted","Data":"62b4e0b014ebe1da7dd7d861f68db7b7d04265dc91934c6e3a43cd727c95a4bc"} Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.450865 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" event={"ID":"fdba3d5b-a88a-48de-a3ab-7306545204f2","Type":"ContainerStarted","Data":"63d18ebd178f7ac04230ab83983cfbfe5328d18c4b1cb4259b8c9d3d7f536e01"} Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.451230 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.548051 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" podStartSLOduration=50.54802518 podStartE2EDuration="50.54802518s" podCreationTimestamp="2026-02-27 08:32:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:33:35.542482503 +0000 UTC m=+313.936884143" watchObservedRunningTime="2026-02-27 08:33:35.54802518 +0000 UTC m=+313.942426790" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.639830 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.681729 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5b699f89b4-tsf9z"] Feb 27 08:33:35 crc kubenswrapper[4906]: E0227 08:33:35.682013 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="97263860-3ad0-4b78-9960-d8c7e0c3a806" containerName="controller-manager" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.682029 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="97263860-3ad0-4b78-9960-d8c7e0c3a806" containerName="controller-manager" Feb 27 08:33:35 crc kubenswrapper[4906]: E0227 08:33:35.682049 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d0f57e44-e247-4a99-afe1-59214fc0f1bd" containerName="pruner" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.682061 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d0f57e44-e247-4a99-afe1-59214fc0f1bd" containerName="pruner" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.682200 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="97263860-3ad0-4b78-9960-d8c7e0c3a806" containerName="controller-manager" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.682223 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d0f57e44-e247-4a99-afe1-59214fc0f1bd" containerName="pruner" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.682623 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.685939 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.686245 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.687133 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.687444 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.687630 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.687805 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.723818 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.742501 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5b699f89b4-tsf9z"] Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.822362 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-serving-cert\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.822485 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlk7x\" (UniqueName: \"kubernetes.io/projected/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-kube-api-access-hlk7x\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.822520 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-proxy-ca-bundles\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.822613 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-client-ca\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.822819 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-config\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.925000 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-serving-cert\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.925104 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlk7x\" (UniqueName: \"kubernetes.io/projected/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-kube-api-access-hlk7x\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.925138 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-proxy-ca-bundles\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.925192 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-client-ca\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.925235 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-config\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.926718 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-client-ca\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.926939 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-proxy-ca-bundles\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.926963 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-config\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" 
Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.931334 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-serving-cert\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:35 crc kubenswrapper[4906]: I0227 08:33:35.954516 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlk7x\" (UniqueName: \"kubernetes.io/projected/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-kube-api-access-hlk7x\") pod \"controller-manager-5b699f89b4-tsf9z\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:36 crc kubenswrapper[4906]: I0227 08:33:36.065063 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:36 crc kubenswrapper[4906]: I0227 08:33:36.296710 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5b699f89b4-tsf9z"] Feb 27 08:33:36 crc kubenswrapper[4906]: I0227 08:33:36.462151 4906 generic.go:334] "Generic (PLEG): container finished" podID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerID="e9a566d1539ad42da990b4e2135ce2702c6c25f51d62ff1f6ef27dccfeea652a" exitCode=0 Feb 27 08:33:36 crc kubenswrapper[4906]: I0227 08:33:36.462285 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerDied","Data":"e9a566d1539ad42da990b4e2135ce2702c6c25f51d62ff1f6ef27dccfeea652a"} Feb 27 08:33:36 crc kubenswrapper[4906]: I0227 08:33:36.466123 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" event={"ID":"21a48fc5-0546-4804-9ad9-4de5fcef0cb0","Type":"ContainerStarted","Data":"80fa788a2eba910f40714cda997e5b3ba4b2739cdd09aba493fcbff35c0cf143"} Feb 27 08:33:36 crc kubenswrapper[4906]: I0227 08:33:36.470527 4906 generic.go:334] "Generic (PLEG): container finished" podID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerID="04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84" exitCode=0 Feb 27 08:33:36 crc kubenswrapper[4906]: I0227 08:33:36.470622 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zb9z6" event={"ID":"e44e0cc5-2800-489c-9bd8-0f06f15adfca","Type":"ContainerDied","Data":"04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84"} Feb 27 08:33:37 crc kubenswrapper[4906]: I0227 08:33:37.482133 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" event={"ID":"21a48fc5-0546-4804-9ad9-4de5fcef0cb0","Type":"ContainerStarted","Data":"f76ba051201b959a58e82b74bb147b82c6b1af758965fa7f5b2c33686c925508"} Feb 27 08:33:38 crc kubenswrapper[4906]: I0227 08:33:38.489168 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:38 crc kubenswrapper[4906]: I0227 08:33:38.494033 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:33:38 crc kubenswrapper[4906]: I0227 08:33:38.509828 4906 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" podStartSLOduration=53.509797414 podStartE2EDuration="53.509797414s" podCreationTimestamp="2026-02-27 08:32:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:33:38.507658863 +0000 UTC m=+316.902060473" watchObservedRunningTime="2026-02-27 08:33:38.509797414 +0000 UTC m=+316.904199024" Feb 27 08:33:39 crc kubenswrapper[4906]: E0227 08:33:39.747804 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift4/ose-cli:latest\\\"\"" pod="openshift-infra/auto-csr-approver-29536350-85r2j" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" Feb 27 08:33:41 crc kubenswrapper[4906]: I0227 08:33:41.129196 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:41 crc kubenswrapper[4906]: I0227 08:33:41.129331 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:41 crc kubenswrapper[4906]: I0227 08:33:41.130142 4906 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkv6l container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" start-of-body= Feb 27 08:33:41 crc kubenswrapper[4906]: I0227 08:33:41.130240 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-hkv6l" podUID="f3cc181a-c108-493a-87fa-9bf76f81b062" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.25:8080/\": dial tcp 10.217.0.25:8080: connect: connection refused" Feb 27 08:33:41 crc kubenswrapper[4906]: E0227 08:33:41.840185 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift4/ose-cli:latest\\\"\"" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" Feb 27 08:33:41 crc kubenswrapper[4906]: E0227 08:33:41.842398 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-knxlc" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" Feb 27 08:33:51 crc kubenswrapper[4906]: I0227 08:33:51.139912 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-hkv6l" Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.637829 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfbgw" 
event={"ID":"d81cc2f1-f6bc-454c-a927-973bf6bc452b","Type":"ContainerStarted","Data":"69c815ab4d3ee91f213ce651ac226797b32433f5629969bedc6a5eba04a1583f"} Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.640611 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxdrs" event={"ID":"b8b3f22b-652a-4703-bfdb-520d2f90867a","Type":"ContainerStarted","Data":"4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26"} Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.644373 4906 generic.go:334] "Generic (PLEG): container finished" podID="86ce64fc-356d-4172-b0c0-8074921dc727" containerID="d360e8203fe660a8293d8efee94c52488e144246e022a7ce807985e6decf868e" exitCode=0 Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.644388 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t5l4n" event={"ID":"86ce64fc-356d-4172-b0c0-8074921dc727","Type":"ContainerDied","Data":"d360e8203fe660a8293d8efee94c52488e144246e022a7ce807985e6decf868e"} Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.646361 4906 generic.go:334] "Generic (PLEG): container finished" podID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerID="9134ff97495c593da5e6813f827dd9c106bb64c06b8d78731e02e2524a1e5b73" exitCode=0 Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.646437 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zn5pm" event={"ID":"2604542b-6cc4-44dc-ab74-f493ac742db9","Type":"ContainerDied","Data":"9134ff97495c593da5e6813f827dd9c106bb64c06b8d78731e02e2524a1e5b73"} Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.650387 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerStarted","Data":"34f07bc951721eef2a395b938d1e1c55a3ea4abeeba47a2cfdc1768716dae7ac"} Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.653578 4906 generic.go:334] "Generic (PLEG): container finished" podID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerID="a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5" exitCode=0 Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.653689 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hw96v" event={"ID":"f9c97127-8fdd-40b2-8248-40df8c50e302","Type":"ContainerDied","Data":"a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5"} Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.659629 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pfbgw" podStartSLOduration=18.599456444 podStartE2EDuration="1m46.659600455s" podCreationTimestamp="2026-02-27 08:32:10 +0000 UTC" firstStartedPulling="2026-02-27 08:32:13.781312749 +0000 UTC m=+232.175714359" lastFinishedPulling="2026-02-27 08:33:41.84145676 +0000 UTC m=+320.235858370" observedRunningTime="2026-02-27 08:33:56.659344997 +0000 UTC m=+335.053746617" watchObservedRunningTime="2026-02-27 08:33:56.659600455 +0000 UTC m=+335.054002065" Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.660605 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zb9z6" event={"ID":"e44e0cc5-2800-489c-9bd8-0f06f15adfca","Type":"ContainerStarted","Data":"016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c"} Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.683581 4906 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jxdrs" podStartSLOduration=8.132588699 podStartE2EDuration="1m44.683552034s" podCreationTimestamp="2026-02-27 08:32:12 +0000 UTC" firstStartedPulling="2026-02-27 08:32:18.851850953 +0000 UTC m=+237.246252563" lastFinishedPulling="2026-02-27 08:33:55.402814258 +0000 UTC m=+333.797215898" observedRunningTime="2026-02-27 08:33:56.683185043 +0000 UTC m=+335.077586673" watchObservedRunningTime="2026-02-27 08:33:56.683552034 +0000 UTC m=+335.077953644" Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.762501 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rtknh" podStartSLOduration=7.200484873 podStartE2EDuration="1m43.762472635s" podCreationTimestamp="2026-02-27 08:32:13 +0000 UTC" firstStartedPulling="2026-02-27 08:32:18.850588707 +0000 UTC m=+237.244990317" lastFinishedPulling="2026-02-27 08:33:55.412576449 +0000 UTC m=+333.806978079" observedRunningTime="2026-02-27 08:33:56.738712331 +0000 UTC m=+335.133113951" watchObservedRunningTime="2026-02-27 08:33:56.762472635 +0000 UTC m=+335.156874245" Feb 27 08:33:56 crc kubenswrapper[4906]: I0227 08:33:56.780730 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zb9z6" podStartSLOduration=16.386253107 podStartE2EDuration="1m43.78070773s" podCreationTimestamp="2026-02-27 08:32:13 +0000 UTC" firstStartedPulling="2026-02-27 08:32:19.862820891 +0000 UTC m=+238.257222501" lastFinishedPulling="2026-02-27 08:33:47.257275504 +0000 UTC m=+325.651677124" observedRunningTime="2026-02-27 08:33:56.778091275 +0000 UTC m=+335.172492885" watchObservedRunningTime="2026-02-27 08:33:56.78070773 +0000 UTC m=+335.175109340" Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.699810 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zn5pm" event={"ID":"2604542b-6cc4-44dc-ab74-f493ac742db9","Type":"ContainerStarted","Data":"2857c2dd2209454fa9322face515508b19d20e3a4db250411e8b9a9a4b718062"} Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.705235 4906 generic.go:334] "Generic (PLEG): container finished" podID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerID="688fea19ece1831380fad2b934d3f5beebcee622ec73837de3cd4b7ea7b1cecb" exitCode=0 Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.705334 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-knxlc" event={"ID":"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7","Type":"ContainerDied","Data":"688fea19ece1831380fad2b934d3f5beebcee622ec73837de3cd4b7ea7b1cecb"} Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.709907 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t5l4n" event={"ID":"86ce64fc-356d-4172-b0c0-8074921dc727","Type":"ContainerStarted","Data":"e9651d07ea8cf43bc78da600ef1773aef8885b18ec5d27ba1d15c999ec4cc0a0"} Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.716627 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536350-85r2j" event={"ID":"f743212c-ed63-408e-8063-ed04c8a7a1a9","Type":"ContainerStarted","Data":"03833bc81af18f8e05b37ffe1347f478a1b73a22faaff39a186c0c9246d4c1f0"} Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.733337 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-zn5pm" podStartSLOduration=3.204453347 podStartE2EDuration="1m47.733308713s" podCreationTimestamp="2026-02-27 08:32:10 +0000 UTC" firstStartedPulling="2026-02-27 08:32:12.756097465 +0000 UTC m=+231.150499075" lastFinishedPulling="2026-02-27 08:33:57.284952831 +0000 UTC m=+335.679354441" observedRunningTime="2026-02-27 08:33:57.730446031 +0000 UTC m=+336.124847641" watchObservedRunningTime="2026-02-27 08:33:57.733308713 +0000 UTC m=+336.127710323" Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.778011 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-t5l4n" podStartSLOduration=3.251079195 podStartE2EDuration="1m47.777989119s" podCreationTimestamp="2026-02-27 08:32:10 +0000 UTC" firstStartedPulling="2026-02-27 08:32:12.693097338 +0000 UTC m=+231.087498938" lastFinishedPulling="2026-02-27 08:33:57.220007252 +0000 UTC m=+335.614408862" observedRunningTime="2026-02-27 08:33:57.775568759 +0000 UTC m=+336.169970369" watchObservedRunningTime="2026-02-27 08:33:57.777989119 +0000 UTC m=+336.172390739" Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.796171 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536350-85r2j" podStartSLOduration=123.934879669 podStartE2EDuration="3m57.796146111s" podCreationTimestamp="2026-02-27 08:30:00 +0000 UTC" firstStartedPulling="2026-02-27 08:32:03.025124319 +0000 UTC m=+221.419525939" lastFinishedPulling="2026-02-27 08:33:56.886390771 +0000 UTC m=+335.280792381" observedRunningTime="2026-02-27 08:33:57.793240308 +0000 UTC m=+336.187641928" watchObservedRunningTime="2026-02-27 08:33:57.796146111 +0000 UTC m=+336.190547721" Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.979044 4906 csr.go:261] certificate signing request csr-zrkts is approved, waiting to be issued Feb 27 08:33:57 crc kubenswrapper[4906]: I0227 08:33:57.987473 4906 csr.go:257] certificate signing request csr-zrkts is issued Feb 27 08:33:58 crc kubenswrapper[4906]: I0227 08:33:58.727387 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hw96v" event={"ID":"f9c97127-8fdd-40b2-8248-40df8c50e302","Type":"ContainerStarted","Data":"32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b"} Feb 27 08:33:58 crc kubenswrapper[4906]: I0227 08:33:58.989562 4906 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-12-26 00:33:06.09177112 +0000 UTC Feb 27 08:33:58 crc kubenswrapper[4906]: I0227 08:33:58.989624 4906 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7239h59m7.102149705s for next certificate rotation Feb 27 08:33:59 crc kubenswrapper[4906]: I0227 08:33:59.989824 4906 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-12-07 20:39:03.084406497 +0000 UTC Feb 27 08:33:59 crc kubenswrapper[4906]: I0227 08:33:59.989907 4906 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6804h5m3.094503997s for next certificate rotation Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.145503 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536354-2w5js"] Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.146360 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536354-2w5js" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.150786 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.154195 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536354-2w5js"] Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.315399 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxxgd\" (UniqueName: \"kubernetes.io/projected/41d7dad5-7a02-4698-9385-0673efb99b6a-kube-api-access-cxxgd\") pod \"auto-csr-approver-29536354-2w5js\" (UID: \"41d7dad5-7a02-4698-9385-0673efb99b6a\") " pod="openshift-infra/auto-csr-approver-29536354-2w5js" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.401453 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.401522 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.418588 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxxgd\" (UniqueName: \"kubernetes.io/projected/41d7dad5-7a02-4698-9385-0673efb99b6a-kube-api-access-cxxgd\") pod \"auto-csr-approver-29536354-2w5js\" (UID: \"41d7dad5-7a02-4698-9385-0673efb99b6a\") " pod="openshift-infra/auto-csr-approver-29536354-2w5js" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.449000 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxxgd\" (UniqueName: \"kubernetes.io/projected/41d7dad5-7a02-4698-9385-0673efb99b6a-kube-api-access-cxxgd\") pod \"auto-csr-approver-29536354-2w5js\" (UID: \"41d7dad5-7a02-4698-9385-0673efb99b6a\") " pod="openshift-infra/auto-csr-approver-29536354-2w5js" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.463232 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536354-2w5js" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.741591 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" event={"ID":"582fc06a-0d1d-4260-a91f-af317ab278d9","Type":"ContainerStarted","Data":"726a8aa0c8e7c8d00a8188ed592f542bd801dddc049730f675f2fb4c83ee1125"} Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.743341 4906 generic.go:334] "Generic (PLEG): container finished" podID="f743212c-ed63-408e-8063-ed04c8a7a1a9" containerID="03833bc81af18f8e05b37ffe1347f478a1b73a22faaff39a186c0c9246d4c1f0" exitCode=0 Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.743501 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536350-85r2j" event={"ID":"f743212c-ed63-408e-8063-ed04c8a7a1a9","Type":"ContainerDied","Data":"03833bc81af18f8e05b37ffe1347f478a1b73a22faaff39a186c0c9246d4c1f0"} Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.766600 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-hw96v" podStartSLOduration=6.83752008 podStartE2EDuration="1m51.76657288s" podCreationTimestamp="2026-02-27 08:32:09 +0000 UTC" firstStartedPulling="2026-02-27 08:32:12.594062554 +0000 UTC m=+230.988464164" lastFinishedPulling="2026-02-27 08:33:57.523115354 +0000 UTC m=+335.917516964" observedRunningTime="2026-02-27 08:34:00.764868581 +0000 UTC m=+339.159270221" watchObservedRunningTime="2026-02-27 08:34:00.76657288 +0000 UTC m=+339.160974490" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.879242 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.879554 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.928048 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.928102 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:34:00 crc kubenswrapper[4906]: I0227 08:34:00.931576 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536354-2w5js"] Feb 27 08:34:00 crc kubenswrapper[4906]: W0227 08:34:00.952246 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41d7dad5_7a02_4698_9385_0673efb99b6a.slice/crio-280513a7044aee48b28524772c5bd53875be208246bdf0c8b72a9da2e2a06185 WatchSource:0}: Error finding container 280513a7044aee48b28524772c5bd53875be208246bdf0c8b72a9da2e2a06185: Status 404 returned error can't find the container with id 280513a7044aee48b28524772c5bd53875be208246bdf0c8b72a9da2e2a06185 Feb 27 08:34:01 crc kubenswrapper[4906]: I0227 08:34:01.618081 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:34:01 crc kubenswrapper[4906]: I0227 08:34:01.618198 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:34:01 crc kubenswrapper[4906]: I0227 08:34:01.618370 4906 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:34:01 crc kubenswrapper[4906]: I0227 08:34:01.765866 4906 generic.go:334] "Generic (PLEG): container finished" podID="582fc06a-0d1d-4260-a91f-af317ab278d9" containerID="726a8aa0c8e7c8d00a8188ed592f542bd801dddc049730f675f2fb4c83ee1125" exitCode=0 Feb 27 08:34:01 crc kubenswrapper[4906]: I0227 08:34:01.765968 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" event={"ID":"582fc06a-0d1d-4260-a91f-af317ab278d9","Type":"ContainerDied","Data":"726a8aa0c8e7c8d00a8188ed592f542bd801dddc049730f675f2fb4c83ee1125"} Feb 27 08:34:01 crc kubenswrapper[4906]: I0227 08:34:01.767014 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536354-2w5js" event={"ID":"41d7dad5-7a02-4698-9385-0673efb99b6a","Type":"ContainerStarted","Data":"280513a7044aee48b28524772c5bd53875be208246bdf0c8b72a9da2e2a06185"} Feb 27 08:34:01 crc kubenswrapper[4906]: I0227 08:34:01.836115 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.214866 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536350-85r2j" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.254038 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9wgc\" (UniqueName: \"kubernetes.io/projected/f743212c-ed63-408e-8063-ed04c8a7a1a9-kube-api-access-x9wgc\") pod \"f743212c-ed63-408e-8063-ed04c8a7a1a9\" (UID: \"f743212c-ed63-408e-8063-ed04c8a7a1a9\") " Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.266451 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f743212c-ed63-408e-8063-ed04c8a7a1a9-kube-api-access-x9wgc" (OuterVolumeSpecName: "kube-api-access-x9wgc") pod "f743212c-ed63-408e-8063-ed04c8a7a1a9" (UID: "f743212c-ed63-408e-8063-ed04c8a7a1a9"). InnerVolumeSpecName "kube-api-access-x9wgc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.355508 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9wgc\" (UniqueName: \"kubernetes.io/projected/f743212c-ed63-408e-8063-ed04c8a7a1a9-kube-api-access-x9wgc\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.618331 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.618397 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.665969 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.781449 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536350-85r2j" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.781439 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536350-85r2j" event={"ID":"f743212c-ed63-408e-8063-ed04c8a7a1a9","Type":"ContainerDied","Data":"80d44b9ef76d050e594c6e683d32cf61ac3d34056278a78e82f4ffdba78def70"} Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.782016 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80d44b9ef76d050e594c6e683d32cf61ac3d34056278a78e82f4ffdba78def70" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.828383 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:34:02 crc kubenswrapper[4906]: I0227 08:34:02.836632 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:34:03 crc kubenswrapper[4906]: I0227 08:34:03.071406 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pfbgw"] Feb 27 08:34:03 crc kubenswrapper[4906]: I0227 08:34:03.573603 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:34:03 crc kubenswrapper[4906]: I0227 08:34:03.573686 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:34:03 crc kubenswrapper[4906]: I0227 08:34:03.788028 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-pfbgw" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="registry-server" containerID="cri-o://69c815ab4d3ee91f213ce651ac226797b32433f5629969bedc6a5eba04a1583f" gracePeriod=2 Feb 27 08:34:03 crc kubenswrapper[4906]: I0227 08:34:03.793525 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:34:03 crc kubenswrapper[4906]: I0227 08:34:03.793585 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.217574 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.388944 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zhhwt\" (UniqueName: \"kubernetes.io/projected/582fc06a-0d1d-4260-a91f-af317ab278d9-kube-api-access-zhhwt\") pod \"582fc06a-0d1d-4260-a91f-af317ab278d9\" (UID: \"582fc06a-0d1d-4260-a91f-af317ab278d9\") " Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.410173 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/582fc06a-0d1d-4260-a91f-af317ab278d9-kube-api-access-zhhwt" (OuterVolumeSpecName: "kube-api-access-zhhwt") pod "582fc06a-0d1d-4260-a91f-af317ab278d9" (UID: "582fc06a-0d1d-4260-a91f-af317ab278d9"). InnerVolumeSpecName "kube-api-access-zhhwt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.492370 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zhhwt\" (UniqueName: \"kubernetes.io/projected/582fc06a-0d1d-4260-a91f-af317ab278d9-kube-api-access-zhhwt\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.623680 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-rtknh" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="registry-server" probeResult="failure" output=< Feb 27 08:34:04 crc kubenswrapper[4906]: timeout: failed to connect service ":50051" within 1s Feb 27 08:34:04 crc kubenswrapper[4906]: > Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.802776 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" event={"ID":"582fc06a-0d1d-4260-a91f-af317ab278d9","Type":"ContainerDied","Data":"babe2b86473c7e1080573fe64cab36fa7e500588ae62c608abf02503e207c883"} Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.803652 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="babe2b86473c7e1080573fe64cab36fa7e500588ae62c608abf02503e207c883" Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.802898 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536352-8fqhc" Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.839638 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zb9z6" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="registry-server" probeResult="failure" output=< Feb 27 08:34:04 crc kubenswrapper[4906]: timeout: failed to connect service ":50051" within 1s Feb 27 08:34:04 crc kubenswrapper[4906]: > Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.873975 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zn5pm"] Feb 27 08:34:04 crc kubenswrapper[4906]: I0227 08:34:04.874262 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zn5pm" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="registry-server" containerID="cri-o://2857c2dd2209454fa9322face515508b19d20e3a4db250411e8b9a9a4b718062" gracePeriod=2 Feb 27 08:34:05 crc kubenswrapper[4906]: I0227 08:34:05.475961 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxdrs"] Feb 27 08:34:05 crc kubenswrapper[4906]: I0227 08:34:05.477021 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jxdrs" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="registry-server" containerID="cri-o://4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26" gracePeriod=2 Feb 27 08:34:05 crc kubenswrapper[4906]: I0227 08:34:05.815432 4906 generic.go:334] "Generic (PLEG): container finished" podID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerID="69c815ab4d3ee91f213ce651ac226797b32433f5629969bedc6a5eba04a1583f" exitCode=0 Feb 27 08:34:05 crc kubenswrapper[4906]: I0227 08:34:05.818305 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfbgw" 
event={"ID":"d81cc2f1-f6bc-454c-a927-973bf6bc452b","Type":"ContainerDied","Data":"69c815ab4d3ee91f213ce651ac226797b32433f5629969bedc6a5eba04a1583f"} Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.511554 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.645799 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qblcj\" (UniqueName: \"kubernetes.io/projected/d81cc2f1-f6bc-454c-a927-973bf6bc452b-kube-api-access-qblcj\") pod \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.645982 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-utilities\") pod \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.646150 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-catalog-content\") pod \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\" (UID: \"d81cc2f1-f6bc-454c-a927-973bf6bc452b\") " Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.646923 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-utilities" (OuterVolumeSpecName: "utilities") pod "d81cc2f1-f6bc-454c-a927-973bf6bc452b" (UID: "d81cc2f1-f6bc-454c-a927-973bf6bc452b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.647691 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.652268 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d81cc2f1-f6bc-454c-a927-973bf6bc452b-kube-api-access-qblcj" (OuterVolumeSpecName: "kube-api-access-qblcj") pod "d81cc2f1-f6bc-454c-a927-973bf6bc452b" (UID: "d81cc2f1-f6bc-454c-a927-973bf6bc452b"). InnerVolumeSpecName "kube-api-access-qblcj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.701863 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d81cc2f1-f6bc-454c-a927-973bf6bc452b" (UID: "d81cc2f1-f6bc-454c-a927-973bf6bc452b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.749607 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d81cc2f1-f6bc-454c-a927-973bf6bc452b-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.749699 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qblcj\" (UniqueName: \"kubernetes.io/projected/d81cc2f1-f6bc-454c-a927-973bf6bc452b-kube-api-access-qblcj\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.831971 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pfbgw" event={"ID":"d81cc2f1-f6bc-454c-a927-973bf6bc452b","Type":"ContainerDied","Data":"caac9ad3fb20410454f1a45a6952b25c93cc8e8114930175bfadddc18b5c1050"} Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.831963 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pfbgw" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.832040 4906 scope.go:117] "RemoveContainer" containerID="69c815ab4d3ee91f213ce651ac226797b32433f5629969bedc6a5eba04a1583f" Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.835571 4906 generic.go:334] "Generic (PLEG): container finished" podID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerID="2857c2dd2209454fa9322face515508b19d20e3a4db250411e8b9a9a4b718062" exitCode=0 Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.835660 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zn5pm" event={"ID":"2604542b-6cc4-44dc-ab74-f493ac742db9","Type":"ContainerDied","Data":"2857c2dd2209454fa9322face515508b19d20e3a4db250411e8b9a9a4b718062"} Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.865501 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-pfbgw"] Feb 27 08:34:07 crc kubenswrapper[4906]: I0227 08:34:07.873343 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-pfbgw"] Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.568161 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" path="/var/lib/kubelet/pods/d81cc2f1-f6bc-454c-a927-973bf6bc452b/volumes" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.794561 4906 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.795460 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" containerName="oc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795502 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" containerName="oc" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.795537 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" containerName="oc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795552 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" containerName="oc" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.795576 4906 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="registry-server" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795591 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="registry-server" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.795614 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="extract-utilities" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795628 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="extract-utilities" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.795643 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="extract-content" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795656 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="extract-content" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795838 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" containerName="oc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795865 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d81cc2f1-f6bc-454c-a927-973bf6bc452b" containerName="registry-server" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.795919 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" containerName="oc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.796685 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.843395 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.964283 4906 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.964785 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635" gracePeriod=15 Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.964960 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524" gracePeriod=15 Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.964967 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615" gracePeriod=15 Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.965044 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378" gracePeriod=15 Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.965025 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328" gracePeriod=15 Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968208 4906 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968528 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968551 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968615 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968627 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968643 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968651 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968665 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968675 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968683 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968691 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968709 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968717 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968726 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968733 4906 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.968746 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968754 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968934 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968953 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968963 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968980 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.968993 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.969014 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.969034 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.969353 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.969374 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: E0227 08:34:08.969390 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.969398 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.969528 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.969788 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.971045 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: 
\"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.971136 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.971322 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.971384 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:08 crc kubenswrapper[4906]: I0227 08:34:08.971531 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.072764 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.072851 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.072948 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.072990 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073003 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.072947 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073042 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073219 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073289 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073328 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073379 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073417 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.073461 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.137094 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.175657 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.175770 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.175771 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.175856 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.175958 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.176021 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.287143 4906 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.287248 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.856061 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.862283 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.863577 4906 generic.go:334] 
"Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328" exitCode=2 Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.867125 4906 generic.go:334] "Generic (PLEG): container finished" podID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerID="4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26" exitCode=0 Feb 27 08:34:09 crc kubenswrapper[4906]: I0227 08:34:09.867152 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxdrs" event={"ID":"b8b3f22b-652a-4703-bfdb-520d2f90867a","Type":"ContainerDied","Data":"4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26"} Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.192354 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.192817 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.252503 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.253498 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.253765 4906 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.253963 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.436390 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.437020 4906 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.437383 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.437866 4906 
status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.438146 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.737221 4906 scope.go:117] "RemoveContainer" containerID="b0c884e777223a0239f211a5f0b15d940fc6eba968f06c0c2be1be88ef7da39f" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.850045 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.851489 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.852082 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.852655 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.853131 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.886351 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zn5pm" event={"ID":"2604542b-6cc4-44dc-ab74-f493ac742db9","Type":"ContainerDied","Data":"a919fd27d94ba36cac40777783c534cbbd4fb2b2eba5bf623c26d96c9bb84fbe"} Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.886381 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zn5pm" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.887436 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.888103 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.888795 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.888976 4906 generic.go:334] "Generic (PLEG): container finished" podID="a71a873c-34c6-45e8-9923-816350816e82" containerID="a6aed1a233358b4939ac9c257f7b983907b9f8b97cc04d3016f6e5444c3eda48" exitCode=0 Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.889083 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a71a873c-34c6-45e8-9923-816350816e82","Type":"ContainerDied","Data":"a6aed1a233358b4939ac9c257f7b983907b9f8b97cc04d3016f6e5444c3eda48"} Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.889387 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.889908 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.890280 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.890742 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.891299 4906 status_manager.go:851] "Failed to get 
status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.891626 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.892090 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.893752 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.894693 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378" exitCode=0 Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.894725 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524" exitCode=0 Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.894739 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615" exitCode=0 Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.908643 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcgrm\" (UniqueName: \"kubernetes.io/projected/2604542b-6cc4-44dc-ab74-f493ac742db9-kube-api-access-fcgrm\") pod \"2604542b-6cc4-44dc-ab74-f493ac742db9\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.908766 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-catalog-content\") pod \"2604542b-6cc4-44dc-ab74-f493ac742db9\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.908825 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-utilities\") pod \"2604542b-6cc4-44dc-ab74-f493ac742db9\" (UID: \"2604542b-6cc4-44dc-ab74-f493ac742db9\") " Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.910067 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-utilities" (OuterVolumeSpecName: "utilities") pod "2604542b-6cc4-44dc-ab74-f493ac742db9" (UID: "2604542b-6cc4-44dc-ab74-f493ac742db9"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.915697 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2604542b-6cc4-44dc-ab74-f493ac742db9-kube-api-access-fcgrm" (OuterVolumeSpecName: "kube-api-access-fcgrm") pod "2604542b-6cc4-44dc-ab74-f493ac742db9" (UID: "2604542b-6cc4-44dc-ab74-f493ac742db9"). InnerVolumeSpecName "kube-api-access-fcgrm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:34:10 crc kubenswrapper[4906]: E0227 08:34:10.918058 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:10Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:10Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:10Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:10Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: E0227 08:34:10.918754 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: E0227 08:34:10.919282 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: E0227 08:34:10.919685 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: E0227 08:34:10.920188 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: E0227 08:34:10.920224 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.948444 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.949295 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.949717 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.950009 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.950302 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:10 crc kubenswrapper[4906]: I0227 08:34:10.950546 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:11 crc kubenswrapper[4906]: I0227 08:34:11.009766 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcgrm\" (UniqueName: \"kubernetes.io/projected/2604542b-6cc4-44dc-ab74-f493ac742db9-kube-api-access-fcgrm\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:11 crc kubenswrapper[4906]: I0227 08:34:11.009799 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:12 crc kubenswrapper[4906]: I0227 08:34:12.554442 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:12 crc kubenswrapper[4906]: I0227 08:34:12.555348 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:12 crc kubenswrapper[4906]: I0227 08:34:12.555924 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:12 crc kubenswrapper[4906]: 
I0227 08:34:12.556339 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:12 crc kubenswrapper[4906]: I0227 08:34:12.556810 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:12 crc kubenswrapper[4906]: E0227 08:34:12.619133 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26 is running failed: container process not found" containerID="4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26" cmd=["grpc_health_probe","-addr=:50051"] Feb 27 08:34:12 crc kubenswrapper[4906]: E0227 08:34:12.620022 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26 is running failed: container process not found" containerID="4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26" cmd=["grpc_health_probe","-addr=:50051"] Feb 27 08:34:12 crc kubenswrapper[4906]: E0227 08:34:12.620428 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26 is running failed: container process not found" containerID="4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26" cmd=["grpc_health_probe","-addr=:50051"] Feb 27 08:34:12 crc kubenswrapper[4906]: E0227 08:34:12.620483 4906 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-jxdrs" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="registry-server" Feb 27 08:34:12 crc kubenswrapper[4906]: E0227 08:34:12.620984 4906 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.2:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-jxdrs.18980d75b80d9940 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-jxdrs,UID:b8b3f22b-652a-4703-bfdb-520d2f90867a,APIVersion:v1,ResourceVersion:28691,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26 is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:34:12.62052384 +0000 UTC 
m=+351.014925460,LastTimestamp:2026-02-27 08:34:12.62052384 +0000 UTC m=+351.014925460,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:34:12 crc kubenswrapper[4906]: I0227 08:34:12.910717 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Feb 27 08:34:12 crc kubenswrapper[4906]: I0227 08:34:12.912696 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 27 08:34:12 crc kubenswrapper[4906]: I0227 08:34:12.913602 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635" exitCode=0 Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.336638 4906 scope.go:117] "RemoveContainer" containerID="04355062341bb18ed31119d0177680a8ffa261ae3fef81af61630416bda9c593" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.432497 4906 scope.go:117] "RemoveContainer" containerID="2857c2dd2209454fa9322face515508b19d20e3a4db250411e8b9a9a4b718062" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.453037 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.453689 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.454195 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.454645 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.455013 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.455670 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 
08:34:13.462640 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.463489 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.463961 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.464501 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.464725 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.465134 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.465686 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.480793 4906 scope.go:117] "RemoveContainer" containerID="9134ff97495c593da5e6813f827dd9c106bb64c06b8d78731e02e2524a1e5b73" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.502840 4906 scope.go:117] "RemoveContainer" containerID="02fb7827baa0e19728b6fc202d364fbaa766878959e49d0a47d13128d90702fb" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.520371 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.550915 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkzf2\" (UniqueName: \"kubernetes.io/projected/b8b3f22b-652a-4703-bfdb-520d2f90867a-kube-api-access-wkzf2\") pod \"b8b3f22b-652a-4703-bfdb-520d2f90867a\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.550980 4906 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-kubelet-dir\") pod \"a71a873c-34c6-45e8-9923-816350816e82\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551054 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a71a873c-34c6-45e8-9923-816350816e82-kube-api-access\") pod \"a71a873c-34c6-45e8-9923-816350816e82\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551119 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-utilities\") pod \"b8b3f22b-652a-4703-bfdb-520d2f90867a\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551134 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a71a873c-34c6-45e8-9923-816350816e82" (UID: "a71a873c-34c6-45e8-9923-816350816e82"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551247 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-catalog-content\") pod \"b8b3f22b-652a-4703-bfdb-520d2f90867a\" (UID: \"b8b3f22b-652a-4703-bfdb-520d2f90867a\") " Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551312 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-var-lock\") pod \"a71a873c-34c6-45e8-9923-816350816e82\" (UID: \"a71a873c-34c6-45e8-9923-816350816e82\") " Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551431 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-var-lock" (OuterVolumeSpecName: "var-lock") pod "a71a873c-34c6-45e8-9923-816350816e82" (UID: "a71a873c-34c6-45e8-9923-816350816e82"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551690 4906 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-var-lock\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.551717 4906 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a71a873c-34c6-45e8-9923-816350816e82-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.552291 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-utilities" (OuterVolumeSpecName: "utilities") pod "b8b3f22b-652a-4703-bfdb-520d2f90867a" (UID: "b8b3f22b-652a-4703-bfdb-520d2f90867a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.556216 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a71a873c-34c6-45e8-9923-816350816e82-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a71a873c-34c6-45e8-9923-816350816e82" (UID: "a71a873c-34c6-45e8-9923-816350816e82"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.556234 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b3f22b-652a-4703-bfdb-520d2f90867a-kube-api-access-wkzf2" (OuterVolumeSpecName: "kube-api-access-wkzf2") pod "b8b3f22b-652a-4703-bfdb-520d2f90867a" (UID: "b8b3f22b-652a-4703-bfdb-520d2f90867a"). InnerVolumeSpecName "kube-api-access-wkzf2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.576757 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8b3f22b-652a-4703-bfdb-520d2f90867a" (UID: "b8b3f22b-652a-4703-bfdb-520d2f90867a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.624503 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.625263 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.625923 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.626332 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.626755 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.627291 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: 
connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.627565 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.627937 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.653305 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.653494 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkzf2\" (UniqueName: \"kubernetes.io/projected/b8b3f22b-652a-4703-bfdb-520d2f90867a-kube-api-access-wkzf2\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.653589 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a71a873c-34c6-45e8-9923-816350816e82-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.653656 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b3f22b-652a-4703-bfdb-520d2f90867a-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.665610 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.666259 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.666680 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.666848 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.667060 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.667464 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.667867 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.668102 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.836003 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.837141 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.837853 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.838379 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.839086 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.839601 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc 
kubenswrapper[4906]: I0227 08:34:13.839990 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.840357 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.841070 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.895493 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.896195 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.896942 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.897675 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.898119 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.898577 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.899002 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.899468 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.899981 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.920711 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"a71a873c-34c6-45e8-9923-816350816e82","Type":"ContainerDied","Data":"402f1bb26b8a0cd6203ab8f85f0b4cf603e6f08e4fb0bcd4a5e543af9be08f6e"} Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.920774 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="402f1bb26b8a0cd6203ab8f85f0b4cf603e6f08e4fb0bcd4a5e543af9be08f6e" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.920791 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.925448 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxdrs" event={"ID":"b8b3f22b-652a-4703-bfdb-520d2f90867a","Type":"ContainerDied","Data":"026330a52e0969e4a44f0f10323f8bdc196044cdae219be48ce84cd5f5a000fc"} Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.925515 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxdrs" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.927006 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.927558 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.927871 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.928153 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.928344 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"662e663815eb817d430acc2d19e3a3dcaa96ca46beeaed457f495e97981268ca"} Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.928567 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.929238 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.929579 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.930211 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: 
connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.950581 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.951181 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.951635 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.952006 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.952454 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.952728 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.953029 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.953293 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.953624 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: 
connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.954926 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.955310 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.955706 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.956000 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.956280 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.956495 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:13 crc kubenswrapper[4906]: I0227 08:34:13.957011 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.470564 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.472106 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.473088 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.473919 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.474419 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.474628 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.474847 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.475236 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.475523 4906 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.475753 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.476431 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565487 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565556 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565607 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565626 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565688 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565789 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565959 4906 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565982 4906 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.565998 4906 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:14 crc kubenswrapper[4906]: E0227 08:34:14.673059 4906 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.2:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-jxdrs.18980d75b80d9940 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-jxdrs,UID:b8b3f22b-652a-4703-bfdb-520d2f90867a,APIVersion:v1,ResourceVersion:28691,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26 is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:34:12.62052384 +0000 UTC m=+351.014925460,LastTimestamp:2026-02-27 08:34:12.62052384 +0000 UTC m=+351.014925460,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.936133 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9"} Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.939684 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-knxlc" event={"ID":"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7","Type":"ContainerStarted","Data":"3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6"} Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.949799 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.951515 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.952408 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.952751 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.953559 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.953840 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.954173 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.954595 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.955086 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.955561 4906 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.955862 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.971943 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.972538 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.972807 4906 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.972987 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.973133 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.973368 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.974349 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.974626 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:14 crc kubenswrapper[4906]: I0227 08:34:14.975315 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.562963 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.572496 4906 scope.go:117] "RemoveContainer" containerID="4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.969157 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.971577 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.972357 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.973148 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.973493 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.974314 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.974686 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.975013 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: 
connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.975498 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:16 crc kubenswrapper[4906]: I0227 08:34:16.975847 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.977929 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.978790 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.979749 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.980591 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.981230 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.981636 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.982112 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: 
connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.982455 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:17 crc kubenswrapper[4906]: I0227 08:34:17.982936 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:18 crc kubenswrapper[4906]: I0227 08:34:18.309335 4906 scope.go:117] "RemoveContainer" containerID="2df010299977b7c2aa23c58bf59b7d640b305100e1f5e17c55901135c86719c2" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.115313 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2604542b-6cc4-44dc-ab74-f493ac742db9" (UID: "2604542b-6cc4-44dc-ab74-f493ac742db9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.138138 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2604542b-6cc4-44dc-ab74-f493ac742db9-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.250394 4906 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.250861 4906 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.251400 4906 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.252020 4906 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.252723 4906 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.252784 4906 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.253358 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="200ms" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.312208 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.313195 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.314126 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.315409 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.315854 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.316379 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.316783 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.317274 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.317604 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.449907 4906 scope.go:117] "RemoveContainer" containerID="3d1158c22bc587aecf1924be0ab2c18b1a34506b725f280816176a81ff536947" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.455018 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="400ms" Feb 27 08:34:19 crc kubenswrapper[4906]: I0227 08:34:19.723208 4906 scope.go:117] "RemoveContainer" containerID="477f7ad1fa21ab7cc69968952c5570984186958dfec230797323b93cc5e00378" Feb 27 08:34:19 crc kubenswrapper[4906]: E0227 08:34:19.856058 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="800ms" Feb 27 08:34:20 crc kubenswrapper[4906]: I0227 08:34:20.118146 4906 scope.go:117] "RemoveContainer" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:34:20 crc kubenswrapper[4906]: E0227 08:34:20.118753 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\": container with ID starting with 235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5 not found: ID does not exist" containerID="235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5" Feb 27 08:34:20 crc kubenswrapper[4906]: I0227 08:34:20.118784 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5"} err="failed to get container status \"235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\": rpc error: code = NotFound desc = could not find container \"235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5\": container with ID starting with 235c606a421a7194aa55a9d0c914944b958102be026ca0b4bfd5073bf926bbf5 not found: ID does not exist" Feb 27 08:34:20 crc kubenswrapper[4906]: I0227 08:34:20.118812 4906 scope.go:117] "RemoveContainer" containerID="165c7fb8fec78a16af0b16a1473427c1b49fce6ffad5771cdf37aea8fce55524" Feb 27 08:34:20 crc kubenswrapper[4906]: I0227 08:34:20.156910 4906 scope.go:117] "RemoveContainer" containerID="d6c368e15d4049a5098eca7ffa96f4e195b0ff2c9ede0483017d2901bd6c1615" Feb 27 08:34:20 crc kubenswrapper[4906]: I0227 08:34:20.192740 4906 scope.go:117] "RemoveContainer" containerID="d26899b98e21f71ad38f0927112999810f9e0357294eab5df7e85cbb52773328" Feb 27 08:34:20 crc kubenswrapper[4906]: I0227 08:34:20.239310 4906 scope.go:117] "RemoveContainer" containerID="1cf80c7d032ca9a11ebeb24cd21b070e5e98ddf79f9cddb899127496a8df1635" Feb 27 08:34:20 crc kubenswrapper[4906]: I0227 08:34:20.304423 4906 scope.go:117] "RemoveContainer" containerID="6cdd124c668e1ba8367c578328695300cf2b99073a12a4972ad1f84b7c624c52" Feb 27 08:34:20 crc kubenswrapper[4906]: E0227 08:34:20.657778 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="1.6s" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.017961 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536354-2w5js" event={"ID":"41d7dad5-7a02-4698-9385-0673efb99b6a","Type":"ContainerStarted","Data":"6708667a2cf57f1dee07a8612bd25d94517ec0a1b55cb53f9c9397cb3bc00ebe"} Feb 27 08:34:21 crc kubenswrapper[4906]: E0227 08:34:21.163101 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:21Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:21Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:21Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-27T08:34:21Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: E0227 08:34:21.164212 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: E0227 08:34:21.164797 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: E0227 08:34:21.165235 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: E0227 08:34:21.165648 4906 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: E0227 08:34:21.165682 4906 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.551800 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.553229 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.554104 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.554571 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.555378 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.556066 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.556450 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.556926 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.557273 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.557603 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.570027 4906 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.570087 4906 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:21 crc kubenswrapper[4906]: E0227 08:34:21.570683 4906 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:21 crc kubenswrapper[4906]: I0227 08:34:21.571785 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:21 crc kubenswrapper[4906]: W0227 08:34:21.599775 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-b4a1c9fac9402742af1d530a2f48d52731bc3ab0fadfd6399b0e8deb6ad0cf6c WatchSource:0}: Error finding container b4a1c9fac9402742af1d530a2f48d52731bc3ab0fadfd6399b0e8deb6ad0cf6c: Status 404 returned error can't find the container with id b4a1c9fac9402742af1d530a2f48d52731bc3ab0fadfd6399b0e8deb6ad0cf6c Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.031049 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b4a1c9fac9402742af1d530a2f48d52731bc3ab0fadfd6399b0e8deb6ad0cf6c"} Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.033447 4906 generic.go:334] "Generic (PLEG): container finished" podID="41d7dad5-7a02-4698-9385-0673efb99b6a" containerID="6708667a2cf57f1dee07a8612bd25d94517ec0a1b55cb53f9c9397cb3bc00ebe" exitCode=0 Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.033502 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536354-2w5js" event={"ID":"41d7dad5-7a02-4698-9385-0673efb99b6a","Type":"ContainerDied","Data":"6708667a2cf57f1dee07a8612bd25d94517ec0a1b55cb53f9c9397cb3bc00ebe"} Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.034784 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.035536 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.036088 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.036841 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.037406 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.038088 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.038608 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.039074 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.039460 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.039928 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: E0227 08:34:22.259567 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="3.2s" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.274477 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.274573 4906 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.359602 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.361025 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.361432 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.361729 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.362308 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.363076 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.363379 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.363848 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.364374 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.364674 4906 status_manager.go:851] "Failed to get 
status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.365218 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.556286 4906 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.556847 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.557104 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.557365 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.557789 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.558277 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.559123 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.559940 4906 status_manager.go:851] "Failed to get 
status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.560429 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.561039 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:22 crc kubenswrapper[4906]: I0227 08:34:22.561623 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.114902 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.115598 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.115793 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.115972 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.116132 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.116287 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.116444 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.116591 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.116738 4906 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.116918 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.117097 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.117264 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.333469 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536354-2w5js" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.334505 4906 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.335211 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.335864 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.336222 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.336748 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.337265 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.337748 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.338246 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.338590 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.354614 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.355225 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.503605 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cxxgd\" (UniqueName: \"kubernetes.io/projected/41d7dad5-7a02-4698-9385-0673efb99b6a-kube-api-access-cxxgd\") pod \"41d7dad5-7a02-4698-9385-0673efb99b6a\" (UID: \"41d7dad5-7a02-4698-9385-0673efb99b6a\") " Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.510614 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41d7dad5-7a02-4698-9385-0673efb99b6a-kube-api-access-cxxgd" (OuterVolumeSpecName: "kube-api-access-cxxgd") pod "41d7dad5-7a02-4698-9385-0673efb99b6a" (UID: "41d7dad5-7a02-4698-9385-0673efb99b6a"). InnerVolumeSpecName "kube-api-access-cxxgd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:34:23 crc kubenswrapper[4906]: I0227 08:34:23.605630 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cxxgd\" (UniqueName: \"kubernetes.io/projected/41d7dad5-7a02-4698-9385-0673efb99b6a-kube-api-access-cxxgd\") on node \"crc\" DevicePath \"\"" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.049579 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536354-2w5js" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.049611 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536354-2w5js" event={"ID":"41d7dad5-7a02-4698-9385-0673efb99b6a","Type":"ContainerDied","Data":"280513a7044aee48b28524772c5bd53875be208246bdf0c8b72a9da2e2a06185"} Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.049695 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="280513a7044aee48b28524772c5bd53875be208246bdf0c8b72a9da2e2a06185" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.067458 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.068307 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.068808 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.069176 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.069557 4906 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.070045 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.070376 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.070817 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" 
pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.071295 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.071538 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: I0227 08:34:24.071789 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:24 crc kubenswrapper[4906]: E0227 08:34:24.674429 4906 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.2:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-jxdrs.18980d75b80d9940 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-jxdrs,UID:b8b3f22b-652a-4703-bfdb-520d2f90867a,APIVersion:v1,ResourceVersion:28691,FieldPath:spec.containers{registry-server},},Reason:Unhealthy,Message:Readiness probe errored: rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e5350a592312e8ac376ba58b577f7ffff7359ac65614368607816810d353e26 is running failed: container process not found,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-27 08:34:12.62052384 +0000 UTC m=+351.014925460,LastTimestamp:2026-02-27 08:34:12.62052384 +0000 UTC m=+351.014925460,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 27 08:34:25 crc kubenswrapper[4906]: I0227 08:34:25.060225 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0fd52d4cdb14af6ab80eb96519345edae4cc861b39f76323880d14a3d71c768c"} Feb 27 08:34:25 crc kubenswrapper[4906]: E0227 08:34:25.461212 4906 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.2:6443: connect: connection refused" interval="6.4s" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.073153 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.074038 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.074114 4906 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec" exitCode=1 Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.074205 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec"} Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.074600 4906 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.074625 4906 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.075156 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: E0227 08:34:26.075165 4906 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.075486 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.075576 4906 scope.go:117] "RemoveContainer" containerID="66e419c34471770e8be9908460758e1b0b826937ec72511e5e9a8f17f54b2bec" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.075760 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.076040 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 
08:34:26.076302 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.078550 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.079062 4906 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.079685 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.080132 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.080556 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.081196 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.081640 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.082043 4906 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.082458 4906 
status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.082787 4906 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.083284 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.083739 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.084119 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.084472 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.084948 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.085360 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc kubenswrapper[4906]: I0227 08:34:26.085691 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:26 crc 
kubenswrapper[4906]: I0227 08:34:26.086313 4906 status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.005817 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.083164 4906 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="0fd52d4cdb14af6ab80eb96519345edae4cc861b39f76323880d14a3d71c768c" exitCode=0 Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.083221 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"0fd52d4cdb14af6ab80eb96519345edae4cc861b39f76323880d14a3d71c768c"} Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.083606 4906 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.083623 4906 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:27 crc kubenswrapper[4906]: E0227 08:34:27.083986 4906 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.084111 4906 status_manager.go:851] "Failed to get status for pod" podUID="a71a873c-34c6-45e8-9923-816350816e82" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.084839 4906 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.086212 4906 status_manager.go:851] "Failed to get status for pod" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" pod="openshift-marketplace/redhat-operators-zb9z6" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-zb9z6\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.087267 4906 status_manager.go:851] "Failed to get status for pod" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" pod="openshift-marketplace/redhat-marketplace-knxlc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-knxlc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.088119 4906 
status_manager.go:851] "Failed to get status for pod" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" pod="openshift-marketplace/redhat-operators-rtknh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-rtknh\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.088464 4906 status_manager.go:851] "Failed to get status for pod" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" pod="openshift-marketplace/community-operators-zn5pm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-zn5pm\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.088786 4906 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.089330 4906 status_manager.go:851] "Failed to get status for pod" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" pod="openshift-marketplace/redhat-marketplace-jxdrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-jxdrs\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.089894 4906 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.090222 4906 status_manager.go:851] "Failed to get status for pod" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" pod="openshift-marketplace/certified-operators-hw96v" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-hw96v\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.090590 4906 status_manager.go:851] "Failed to get status for pod" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" pod="openshift-infra/auto-csr-approver-29536354-2w5js" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-infra/pods/auto-csr-approver-29536354-2w5js\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:27 crc kubenswrapper[4906]: I0227 08:34:27.091127 4906 status_manager.go:851] "Failed to get status for pod" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" pod="openshift-marketplace/community-operators-t5l4n" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-t5l4n\": dial tcp 38.102.83.2:6443: connect: connection refused" Feb 27 08:34:28 crc kubenswrapper[4906]: I0227 08:34:28.094571 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4e0067a91c0921a5cbcc2fda68f595b6e3674c1281fb74808e0a42d8f8edb604"} Feb 27 08:34:28 crc kubenswrapper[4906]: I0227 08:34:28.102680 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Feb 27 08:34:28 crc kubenswrapper[4906]: I0227 08:34:28.106613 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 27 08:34:28 crc kubenswrapper[4906]: I0227 08:34:28.106711 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"6a72344952ff67d3cefb46a8487cec0c8b6ad9df005f2b4af65c723487cb2e41"} Feb 27 08:34:29 crc kubenswrapper[4906]: I0227 08:34:29.118211 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0e78e8cea2d9be7b1bdab3dd418587178b03aad29181604842e117e8ce07ed94"} Feb 27 08:34:29 crc kubenswrapper[4906]: I0227 08:34:29.407199 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:34:29 crc kubenswrapper[4906]: I0227 08:34:29.407740 4906 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Feb 27 08:34:29 crc kubenswrapper[4906]: I0227 08:34:29.407828 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Feb 27 08:34:31 crc kubenswrapper[4906]: I0227 08:34:31.135214 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"966c3cf79c604296c0f4c7f768f5e1aa355c32694d864885b067a810a3515644"} Feb 27 08:34:32 crc kubenswrapper[4906]: I0227 08:34:32.146525 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4f5ebce019932dc4c0caa1c63217fb8aff16987a1b48fded83b35094ca2318b6"} Feb 27 08:34:32 crc kubenswrapper[4906]: I0227 08:34:32.146994 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:32 crc kubenswrapper[4906]: I0227 08:34:32.147007 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0a2e82f8034c5d202687a88bc2e3f462f93d3f4962fdd88b14806c313941c8b3"} Feb 27 08:34:32 crc kubenswrapper[4906]: I0227 08:34:32.147065 4906 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:32 crc kubenswrapper[4906]: I0227 08:34:32.147108 4906 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:32 crc kubenswrapper[4906]: I0227 08:34:32.155072 4906 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:34:33 crc kubenswrapper[4906]: I0227 08:34:33.153266 4906 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:33 crc kubenswrapper[4906]: I0227 08:34:33.153744 4906 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="165ec89d-3872-45c2-9529-c3c8430d3798" Feb 27 08:34:34 crc kubenswrapper[4906]: I0227 08:34:34.232125 4906 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="6afdecff-5e2c-40f1-a651-8323cc8cfd76" Feb 27 08:34:37 crc kubenswrapper[4906]: I0227 08:34:37.006271 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:34:39 crc kubenswrapper[4906]: I0227 08:34:39.411776 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:34:39 crc kubenswrapper[4906]: I0227 08:34:39.422178 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 27 08:34:43 crc kubenswrapper[4906]: I0227 08:34:43.912233 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 27 08:34:44 crc kubenswrapper[4906]: I0227 08:34:44.306922 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 27 08:34:44 crc kubenswrapper[4906]: I0227 08:34:44.463323 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 27 08:34:46 crc kubenswrapper[4906]: I0227 08:34:46.192246 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 27 08:34:46 crc kubenswrapper[4906]: I0227 08:34:46.550209 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 27 08:34:46 crc kubenswrapper[4906]: I0227 08:34:46.759356 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 27 08:34:46 crc kubenswrapper[4906]: I0227 08:34:46.999322 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.295570 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.301581 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.355949 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.491970 4906 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.654862 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.662371 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.881910 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 27 08:34:47 crc kubenswrapper[4906]: I0227 08:34:47.932007 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.025979 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.028714 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.060493 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.070258 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.332919 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.376060 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.514757 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.558351 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.564566 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.593391 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.713910 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.741694 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.816568 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 08:34:48.892547 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 27 08:34:48 crc kubenswrapper[4906]: I0227 
08:34:48.940262 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.071167 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.270014 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.344125 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.369316 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.395598 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.459207 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.487184 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.644658 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.678112 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.689849 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.877794 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 27 08:34:49 crc kubenswrapper[4906]: I0227 08:34:49.907047 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.038239 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.119009 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.237269 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.327761 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.391762 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.426228 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 
27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.532642 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.566192 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.643588 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.682041 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.714492 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.867682 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.927053 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:34:50 crc kubenswrapper[4906]: I0227 08:34:50.998320 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.072774 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.090013 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.166537 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.189073 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.394411 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.420266 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.423856 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.441921 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.455022 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.667083 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.696785 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 27 
08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.752476 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.763480 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.768177 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 27 08:34:51 crc kubenswrapper[4906]: I0227 08:34:51.957316 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.050831 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.059115 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.237590 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.336693 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.497904 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.608019 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.627281 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.674289 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.697290 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.713070 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.776266 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.776310 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.778523 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.808675 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.830457 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 27 08:34:52 crc kubenswrapper[4906]: I0227 08:34:52.859614 4906 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.071215 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.116325 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.122087 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.202685 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.247755 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.327087 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.327814 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.408995 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.524854 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.525747 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.828836 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 27 08:34:53 crc kubenswrapper[4906]: I0227 08:34:53.886011 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.018823 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.137643 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.185795 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.196294 4906 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.255199 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.261237 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 27 08:34:54 crc kubenswrapper[4906]: 
I0227 08:34:54.287755 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.305737 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.341242 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.503674 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.521638 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.555154 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.648010 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.680292 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.798824 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 27 08:34:54 crc kubenswrapper[4906]: I0227 08:34:54.992741 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.038205 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.138020 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.340532 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.534820 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.583282 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.608969 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.807085 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.816553 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 27 08:34:55 crc kubenswrapper[4906]: I0227 08:34:55.929026 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.019444 4906 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.071598 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.157673 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.165665 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.197778 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.208700 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.247260 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.300918 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.360313 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.367683 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.493726 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.620901 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.695755 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.919439 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 27 08:34:56 crc kubenswrapper[4906]: I0227 08:34:56.952759 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 27 08:34:57 crc kubenswrapper[4906]: I0227 08:34:57.057898 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 27 08:34:57 crc kubenswrapper[4906]: I0227 08:34:57.263299 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 27 08:34:57 crc kubenswrapper[4906]: I0227 08:34:57.301097 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 27 08:34:57 crc kubenswrapper[4906]: I0227 08:34:57.370519 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 27 08:34:57 crc kubenswrapper[4906]: I0227 08:34:57.436552 
4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 27 08:34:57 crc kubenswrapper[4906]: I0227 08:34:57.581495 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 27 08:34:57 crc kubenswrapper[4906]: I0227 08:34:57.646386 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.023136 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.112049 4906 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.356669 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.691491 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.740466 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.760534 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.801441 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 27 08:34:58 crc kubenswrapper[4906]: I0227 08:34:58.832095 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 27 08:34:59 crc kubenswrapper[4906]: I0227 08:34:59.569759 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 27 08:34:59 crc kubenswrapper[4906]: I0227 08:34:59.634346 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 27 08:34:59 crc kubenswrapper[4906]: I0227 08:34:59.692697 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 27 08:34:59 crc kubenswrapper[4906]: I0227 08:34:59.731987 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 27 08:34:59 crc kubenswrapper[4906]: I0227 08:34:59.797536 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:34:59 crc kubenswrapper[4906]: I0227 08:34:59.891018 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 27 08:35:00 crc kubenswrapper[4906]: I0227 08:35:00.041390 4906 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 27 08:35:00 crc kubenswrapper[4906]: I0227 08:35:00.341120 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 27 08:35:00 crc kubenswrapper[4906]: I0227 08:35:00.448216 4906 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 27 08:35:00 crc kubenswrapper[4906]: I0227 08:35:00.489784 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 27 08:35:00 crc kubenswrapper[4906]: I0227 08:35:00.519472 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 27 08:35:00 crc kubenswrapper[4906]: I0227 08:35:00.843035 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 27 08:35:07 crc kubenswrapper[4906]: I0227 08:35:07.981428 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 27 08:35:10 crc kubenswrapper[4906]: I0227 08:35:10.853487 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 27 08:35:11 crc kubenswrapper[4906]: I0227 08:35:11.833805 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 27 08:35:12 crc kubenswrapper[4906]: I0227 08:35:12.200867 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.486331 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.775720 4906 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.778906 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-knxlc" podStartSLOduration=67.267829056 podStartE2EDuration="3m2.778856234s" podCreationTimestamp="2026-02-27 08:32:11 +0000 UTC" firstStartedPulling="2026-02-27 08:32:17.831609702 +0000 UTC m=+236.226011332" lastFinishedPulling="2026-02-27 08:34:13.34263689 +0000 UTC m=+351.737038510" observedRunningTime="2026-02-27 08:34:34.401973581 +0000 UTC m=+372.796375181" watchObservedRunningTime="2026-02-27 08:35:13.778856234 +0000 UTC m=+412.173257844" Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.780221 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=65.780213003 podStartE2EDuration="1m5.780213003s" podCreationTimestamp="2026-02-27 08:34:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:34:34.3592007 +0000 UTC m=+372.753602310" watchObservedRunningTime="2026-02-27 08:35:13.780213003 +0000 UTC m=+412.174614613" Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.781251 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxdrs","openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/community-operators-zn5pm"] Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.781311 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.781332 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb","openshift-controller-manager/controller-manager-5b699f89b4-tsf9z"] Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.781780 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" podUID="fdba3d5b-a88a-48de-a3ab-7306545204f2" containerName="route-controller-manager" containerID="cri-o://62b4e0b014ebe1da7dd7d861f68db7b7d04265dc91934c6e3a43cd727c95a4bc" gracePeriod=30 Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.782132 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" podUID="21a48fc5-0546-4804-9ad9-4de5fcef0cb0" containerName="controller-manager" containerID="cri-o://f76ba051201b959a58e82b74bb147b82c6b1af758965fa7f5b2c33686c925508" gracePeriod=30 Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.811962 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=41.811933871 podStartE2EDuration="41.811933871s" podCreationTimestamp="2026-02-27 08:34:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:35:13.810296144 +0000 UTC m=+412.204697794" watchObservedRunningTime="2026-02-27 08:35:13.811933871 +0000 UTC m=+412.206335511" Feb 27 08:35:13 crc kubenswrapper[4906]: I0227 08:35:13.881707 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.013703 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.421583 4906 generic.go:334] "Generic (PLEG): container finished" podID="fdba3d5b-a88a-48de-a3ab-7306545204f2" containerID="62b4e0b014ebe1da7dd7d861f68db7b7d04265dc91934c6e3a43cd727c95a4bc" exitCode=0 Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.421687 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" event={"ID":"fdba3d5b-a88a-48de-a3ab-7306545204f2","Type":"ContainerDied","Data":"62b4e0b014ebe1da7dd7d861f68db7b7d04265dc91934c6e3a43cd727c95a4bc"} Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.423579 4906 generic.go:334] "Generic (PLEG): container finished" podID="21a48fc5-0546-4804-9ad9-4de5fcef0cb0" containerID="f76ba051201b959a58e82b74bb147b82c6b1af758965fa7f5b2c33686c925508" exitCode=0 Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.423619 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" event={"ID":"21a48fc5-0546-4804-9ad9-4de5fcef0cb0","Type":"ContainerDied","Data":"f76ba051201b959a58e82b74bb147b82c6b1af758965fa7f5b2c33686c925508"} Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.481497 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.566820 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" 
path="/var/lib/kubelet/pods/2604542b-6cc4-44dc-ab74-f493ac742db9/volumes" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.567573 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" path="/var/lib/kubelet/pods/b8b3f22b-652a-4703-bfdb-520d2f90867a/volumes" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.700549 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.764960 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.896996 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.901870 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.928770 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd"] Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929118 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" containerName="oc" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929139 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" containerName="oc" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929149 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="registry-server" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929155 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="registry-server" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929165 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="extract-utilities" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929172 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="extract-utilities" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929187 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a71a873c-34c6-45e8-9923-816350816e82" containerName="installer" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929194 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a71a873c-34c6-45e8-9923-816350816e82" containerName="installer" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929201 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdba3d5b-a88a-48de-a3ab-7306545204f2" containerName="route-controller-manager" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929207 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdba3d5b-a88a-48de-a3ab-7306545204f2" containerName="route-controller-manager" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929217 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="extract-utilities" Feb 27 08:35:14 crc 
kubenswrapper[4906]: I0227 08:35:14.929222 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="extract-utilities" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929232 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="extract-content" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929238 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="extract-content" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929278 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="registry-server" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929286 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="registry-server" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929294 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21a48fc5-0546-4804-9ad9-4de5fcef0cb0" containerName="controller-manager" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929300 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="21a48fc5-0546-4804-9ad9-4de5fcef0cb0" containerName="controller-manager" Feb 27 08:35:14 crc kubenswrapper[4906]: E0227 08:35:14.929314 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="extract-content" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929320 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="extract-content" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929442 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdba3d5b-a88a-48de-a3ab-7306545204f2" containerName="route-controller-manager" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929457 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8b3f22b-652a-4703-bfdb-520d2f90867a" containerName="registry-server" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929476 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" containerName="oc" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929484 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a71a873c-34c6-45e8-9923-816350816e82" containerName="installer" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929497 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="21a48fc5-0546-4804-9ad9-4de5fcef0cb0" containerName="controller-manager" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929506 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="2604542b-6cc4-44dc-ab74-f493ac742db9" containerName="registry-server" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.929935 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.943951 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd"] Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953032 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlk7x\" (UniqueName: \"kubernetes.io/projected/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-kube-api-access-hlk7x\") pod \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953128 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-config\") pod \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953173 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-proxy-ca-bundles\") pod \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953262 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-serving-cert\") pod \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953303 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5w8gx\" (UniqueName: \"kubernetes.io/projected/fdba3d5b-a88a-48de-a3ab-7306545204f2-kube-api-access-5w8gx\") pod \"fdba3d5b-a88a-48de-a3ab-7306545204f2\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953359 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdba3d5b-a88a-48de-a3ab-7306545204f2-serving-cert\") pod \"fdba3d5b-a88a-48de-a3ab-7306545204f2\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953434 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-client-ca\") pod \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\" (UID: \"21a48fc5-0546-4804-9ad9-4de5fcef0cb0\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953465 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-client-ca\") pod \"fdba3d5b-a88a-48de-a3ab-7306545204f2\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.953510 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-config\") pod \"fdba3d5b-a88a-48de-a3ab-7306545204f2\" (UID: \"fdba3d5b-a88a-48de-a3ab-7306545204f2\") " Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.955249 4906 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-config" (OuterVolumeSpecName: "config") pod "fdba3d5b-a88a-48de-a3ab-7306545204f2" (UID: "fdba3d5b-a88a-48de-a3ab-7306545204f2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.961030 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-config" (OuterVolumeSpecName: "config") pod "21a48fc5-0546-4804-9ad9-4de5fcef0cb0" (UID: "21a48fc5-0546-4804-9ad9-4de5fcef0cb0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.961395 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-client-ca" (OuterVolumeSpecName: "client-ca") pod "21a48fc5-0546-4804-9ad9-4de5fcef0cb0" (UID: "21a48fc5-0546-4804-9ad9-4de5fcef0cb0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.963263 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-client-ca" (OuterVolumeSpecName: "client-ca") pod "fdba3d5b-a88a-48de-a3ab-7306545204f2" (UID: "fdba3d5b-a88a-48de-a3ab-7306545204f2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.964103 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "21a48fc5-0546-4804-9ad9-4de5fcef0cb0" (UID: "21a48fc5-0546-4804-9ad9-4de5fcef0cb0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.967195 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-kube-api-access-hlk7x" (OuterVolumeSpecName: "kube-api-access-hlk7x") pod "21a48fc5-0546-4804-9ad9-4de5fcef0cb0" (UID: "21a48fc5-0546-4804-9ad9-4de5fcef0cb0"). InnerVolumeSpecName "kube-api-access-hlk7x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.967421 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdba3d5b-a88a-48de-a3ab-7306545204f2-kube-api-access-5w8gx" (OuterVolumeSpecName: "kube-api-access-5w8gx") pod "fdba3d5b-a88a-48de-a3ab-7306545204f2" (UID: "fdba3d5b-a88a-48de-a3ab-7306545204f2"). InnerVolumeSpecName "kube-api-access-5w8gx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.967501 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "21a48fc5-0546-4804-9ad9-4de5fcef0cb0" (UID: "21a48fc5-0546-4804-9ad9-4de5fcef0cb0"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:35:14 crc kubenswrapper[4906]: I0227 08:35:14.967830 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdba3d5b-a88a-48de-a3ab-7306545204f2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fdba3d5b-a88a-48de-a3ab-7306545204f2" (UID: "fdba3d5b-a88a-48de-a3ab-7306545204f2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055040 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ac385-63ba-401c-9fc4-ce38697c0219-serving-cert\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055169 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-config\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055221 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dl6h\" (UniqueName: \"kubernetes.io/projected/1c0ac385-63ba-401c-9fc4-ce38697c0219-kube-api-access-4dl6h\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055298 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-client-ca\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055352 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5w8gx\" (UniqueName: \"kubernetes.io/projected/fdba3d5b-a88a-48de-a3ab-7306545204f2-kube-api-access-5w8gx\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055369 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fdba3d5b-a88a-48de-a3ab-7306545204f2-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055386 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055398 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055409 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/fdba3d5b-a88a-48de-a3ab-7306545204f2-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055421 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlk7x\" (UniqueName: \"kubernetes.io/projected/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-kube-api-access-hlk7x\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055433 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055444 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.055456 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/21a48fc5-0546-4804-9ad9-4de5fcef0cb0-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.156644 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dl6h\" (UniqueName: \"kubernetes.io/projected/1c0ac385-63ba-401c-9fc4-ce38697c0219-kube-api-access-4dl6h\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.156760 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-client-ca\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.156810 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ac385-63ba-401c-9fc4-ce38697c0219-serving-cert\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.156948 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-config\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.158384 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-client-ca\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.159119 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-config\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.162201 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ac385-63ba-401c-9fc4-ce38697c0219-serving-cert\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.173692 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dl6h\" (UniqueName: \"kubernetes.io/projected/1c0ac385-63ba-401c-9fc4-ce38697c0219-kube-api-access-4dl6h\") pod \"route-controller-manager-7d7b5c67bb-pbbzd\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.245272 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.261412 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.435054 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" event={"ID":"fdba3d5b-a88a-48de-a3ab-7306545204f2","Type":"ContainerDied","Data":"63d18ebd178f7ac04230ab83983cfbfe5328d18c4b1cb4259b8c9d3d7f536e01"} Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.435084 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.435149 4906 scope.go:117] "RemoveContainer" containerID="62b4e0b014ebe1da7dd7d861f68db7b7d04265dc91934c6e3a43cd727c95a4bc" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.443204 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" event={"ID":"21a48fc5-0546-4804-9ad9-4de5fcef0cb0","Type":"ContainerDied","Data":"80fa788a2eba910f40714cda997e5b3ba4b2739cdd09aba493fcbff35c0cf143"} Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.443271 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5b699f89b4-tsf9z" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.465246 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd"] Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.468510 4906 scope.go:117] "RemoveContainer" containerID="f76ba051201b959a58e82b74bb147b82c6b1af758965fa7f5b2c33686c925508" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.473127 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb"] Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.480123 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7dd7689756-vt2tb"] Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.487288 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5b699f89b4-tsf9z"] Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.491601 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5b699f89b4-tsf9z"] Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.581237 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 27 08:35:15 crc kubenswrapper[4906]: I0227 08:35:15.735460 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.454286 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" event={"ID":"1c0ac385-63ba-401c-9fc4-ce38697c0219","Type":"ContainerStarted","Data":"7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada"} Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.454851 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" event={"ID":"1c0ac385-63ba-401c-9fc4-ce38697c0219","Type":"ContainerStarted","Data":"1d426f5721b256f8ee3aa1e5618664d86b309b58922a65e51b466bf8dbb79179"} Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.454939 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.460671 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.478824 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" podStartSLOduration=11.478792172 podStartE2EDuration="11.478792172s" podCreationTimestamp="2026-02-27 08:35:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:35:16.472497212 +0000 UTC m=+414.866898872" watchObservedRunningTime="2026-02-27 08:35:16.478792172 +0000 UTC m=+414.873193782" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.529342 4906 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.561863 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21a48fc5-0546-4804-9ad9-4de5fcef0cb0" path="/var/lib/kubelet/pods/21a48fc5-0546-4804-9ad9-4de5fcef0cb0/volumes" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.562561 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdba3d5b-a88a-48de-a3ab-7306545204f2" path="/var/lib/kubelet/pods/fdba3d5b-a88a-48de-a3ab-7306545204f2/volumes" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.572490 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.572589 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.577073 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:35:16 crc kubenswrapper[4906]: I0227 08:35:16.577860 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.189700 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.327071 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.476438 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.755453 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-66d857645d-ctr64"] Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.756314 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.758804 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.758847 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.759070 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.759139 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.765133 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.765541 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.775096 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.783334 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66d857645d-ctr64"] Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.801381 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-config\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.801436 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6msfh\" (UniqueName: \"kubernetes.io/projected/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-kube-api-access-6msfh\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.801458 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-serving-cert\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.801495 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-proxy-ca-bundles\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.801771 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-client-ca\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.903321 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-client-ca\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.903419 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-config\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.903455 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6msfh\" (UniqueName: \"kubernetes.io/projected/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-kube-api-access-6msfh\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.903494 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-serving-cert\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.903535 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-proxy-ca-bundles\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.905452 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-config\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.905500 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-proxy-ca-bundles\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.905496 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-client-ca\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " 
pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.917450 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-serving-cert\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.922063 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6msfh\" (UniqueName: \"kubernetes.io/projected/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-kube-api-access-6msfh\") pod \"controller-manager-66d857645d-ctr64\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:17 crc kubenswrapper[4906]: I0227 08:35:17.982442 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.049654 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.081611 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.232491 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.287515 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-66d857645d-ctr64"] Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.307674 4906 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.307997 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9" gracePeriod=5 Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.383072 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.477640 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" event={"ID":"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5","Type":"ContainerStarted","Data":"7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e"} Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.478139 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" event={"ID":"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5","Type":"ContainerStarted","Data":"09fa5510c3b9c330fb68953d1a0b8e6f8ad9e162147c4ad8ee05efa4c353c9c0"} Feb 27 08:35:18 crc kubenswrapper[4906]: I0227 08:35:18.692342 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 27 08:35:19 crc kubenswrapper[4906]: I0227 08:35:19.523751 4906 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" podStartSLOduration=14.523717144999999 podStartE2EDuration="14.523717145s" podCreationTimestamp="2026-02-27 08:35:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:35:19.514833051 +0000 UTC m=+417.909234711" watchObservedRunningTime="2026-02-27 08:35:19.523717145 +0000 UTC m=+417.918118785" Feb 27 08:35:19 crc kubenswrapper[4906]: I0227 08:35:19.548102 4906 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 27 08:35:19 crc kubenswrapper[4906]: I0227 08:35:19.933793 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 27 08:35:20 crc kubenswrapper[4906]: I0227 08:35:20.012576 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 27 08:35:20 crc kubenswrapper[4906]: I0227 08:35:20.189384 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 27 08:35:20 crc kubenswrapper[4906]: I0227 08:35:20.235004 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 27 08:35:20 crc kubenswrapper[4906]: I0227 08:35:20.579551 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 27 08:35:20 crc kubenswrapper[4906]: I0227 08:35:20.633078 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 27 08:35:20 crc kubenswrapper[4906]: I0227 08:35:20.952172 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 27 08:35:21 crc kubenswrapper[4906]: I0227 08:35:21.459634 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 27 08:35:21 crc kubenswrapper[4906]: I0227 08:35:21.957305 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 27 08:35:22 crc kubenswrapper[4906]: I0227 08:35:22.004676 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 27 08:35:22 crc kubenswrapper[4906]: I0227 08:35:22.658765 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 27 08:35:22 crc kubenswrapper[4906]: I0227 08:35:22.679314 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 27 08:35:22 crc kubenswrapper[4906]: I0227 08:35:22.753758 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 27 08:35:22 crc kubenswrapper[4906]: I0227 08:35:22.825509 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.063652 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.217927 4906 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.269234 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.374239 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.449231 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.449330 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.513674 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.513742 4906 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9" exitCode=137 Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.513800 4906 scope.go:117] "RemoveContainer" containerID="b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.513856 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.533414 4906 scope.go:117] "RemoveContainer" containerID="b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9" Feb 27 08:35:23 crc kubenswrapper[4906]: E0227 08:35:23.534166 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9\": container with ID starting with b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9 not found: ID does not exist" containerID="b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.534238 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9"} err="failed to get container status \"b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9\": rpc error: code = NotFound desc = could not find container \"b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9\": container with ID starting with b9c52c29066c277f8eb7400ea69e5c14389874bb7939f2b482ee598b09f075c9 not found: ID does not exist" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604526 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604606 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604698 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604732 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604787 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604806 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604839 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604961 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.604949 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.605417 4906 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.605438 4906 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.605451 4906 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.605462 4906 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.614717 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.707007 4906 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:23 crc kubenswrapper[4906]: I0227 08:35:23.723535 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.445406 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.560297 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.560621 4906 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.563227 4906 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.572895 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.572949 4906 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="a785c9f1-9ff9-4ca4-a051-339def81f6fc" Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.578204 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.578275 4906 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" 
mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="a785c9f1-9ff9-4ca4-a051-339def81f6fc" Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.641968 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 27 08:35:24 crc kubenswrapper[4906]: I0227 08:35:24.800685 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 27 08:35:25 crc kubenswrapper[4906]: I0227 08:35:25.151313 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 27 08:35:25 crc kubenswrapper[4906]: I0227 08:35:25.798841 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66d857645d-ctr64"] Feb 27 08:35:25 crc kubenswrapper[4906]: I0227 08:35:25.799477 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" podUID="4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" containerName="controller-manager" containerID="cri-o://7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e" gracePeriod=30 Feb 27 08:35:25 crc kubenswrapper[4906]: I0227 08:35:25.799690 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:25 crc kubenswrapper[4906]: I0227 08:35:25.807871 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:25 crc kubenswrapper[4906]: I0227 08:35:25.818910 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd"] Feb 27 08:35:25 crc kubenswrapper[4906]: I0227 08:35:25.819280 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" podUID="1c0ac385-63ba-401c-9fc4-ce38697c0219" containerName="route-controller-manager" containerID="cri-o://7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada" gracePeriod=30 Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.030395 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.136291 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.323140 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.328192 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.445911 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-config\") pod \"1c0ac385-63ba-401c-9fc4-ce38697c0219\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.445997 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-client-ca\") pod \"1c0ac385-63ba-401c-9fc4-ce38697c0219\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.446024 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-client-ca\") pod \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.446083 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-proxy-ca-bundles\") pod \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.446131 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-serving-cert\") pod \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.446177 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-config\") pod \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.446224 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6msfh\" (UniqueName: \"kubernetes.io/projected/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-kube-api-access-6msfh\") pod \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\" (UID: \"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.446287 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4dl6h\" (UniqueName: \"kubernetes.io/projected/1c0ac385-63ba-401c-9fc4-ce38697c0219-kube-api-access-4dl6h\") pod \"1c0ac385-63ba-401c-9fc4-ce38697c0219\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.446338 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ac385-63ba-401c-9fc4-ce38697c0219-serving-cert\") pod \"1c0ac385-63ba-401c-9fc4-ce38697c0219\" (UID: \"1c0ac385-63ba-401c-9fc4-ce38697c0219\") " Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.447545 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-client-ca" (OuterVolumeSpecName: "client-ca") pod "1c0ac385-63ba-401c-9fc4-ce38697c0219" (UID: 
"1c0ac385-63ba-401c-9fc4-ce38697c0219"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.447614 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-config" (OuterVolumeSpecName: "config") pod "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" (UID: "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.447695 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-config" (OuterVolumeSpecName: "config") pod "1c0ac385-63ba-401c-9fc4-ce38697c0219" (UID: "1c0ac385-63ba-401c-9fc4-ce38697c0219"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.447793 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-client-ca" (OuterVolumeSpecName: "client-ca") pod "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" (UID: "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.447951 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" (UID: "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.452434 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c0ac385-63ba-401c-9fc4-ce38697c0219-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1c0ac385-63ba-401c-9fc4-ce38697c0219" (UID: "1c0ac385-63ba-401c-9fc4-ce38697c0219"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.452472 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-kube-api-access-6msfh" (OuterVolumeSpecName: "kube-api-access-6msfh") pod "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" (UID: "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5"). InnerVolumeSpecName "kube-api-access-6msfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.452630 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" (UID: "4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.459808 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c0ac385-63ba-401c-9fc4-ce38697c0219-kube-api-access-4dl6h" (OuterVolumeSpecName: "kube-api-access-4dl6h") pod "1c0ac385-63ba-401c-9fc4-ce38697c0219" (UID: "1c0ac385-63ba-401c-9fc4-ce38697c0219"). 
InnerVolumeSpecName "kube-api-access-4dl6h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.536191 4906 generic.go:334] "Generic (PLEG): container finished" podID="1c0ac385-63ba-401c-9fc4-ce38697c0219" containerID="7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada" exitCode=0 Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.536272 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.536292 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" event={"ID":"1c0ac385-63ba-401c-9fc4-ce38697c0219","Type":"ContainerDied","Data":"7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada"} Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.537441 4906 scope.go:117] "RemoveContainer" containerID="7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.537475 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.537305 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd" event={"ID":"1c0ac385-63ba-401c-9fc4-ce38697c0219","Type":"ContainerDied","Data":"1d426f5721b256f8ee3aa1e5618664d86b309b58922a65e51b466bf8dbb79179"} Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.539042 4906 generic.go:334] "Generic (PLEG): container finished" podID="4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" containerID="7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e" exitCode=0 Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.539095 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" event={"ID":"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5","Type":"ContainerDied","Data":"7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e"} Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.539135 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" event={"ID":"4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5","Type":"ContainerDied","Data":"09fa5510c3b9c330fb68953d1a0b8e6f8ad9e162147c4ad8ee05efa4c353c9c0"} Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.539201 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-66d857645d-ctr64" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.547905 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.547955 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.547975 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.548000 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6msfh\" (UniqueName: \"kubernetes.io/projected/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-kube-api-access-6msfh\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.548017 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4dl6h\" (UniqueName: \"kubernetes.io/projected/1c0ac385-63ba-401c-9fc4-ce38697c0219-kube-api-access-4dl6h\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.548035 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0ac385-63ba-401c-9fc4-ce38697c0219-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.548052 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.548067 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/1c0ac385-63ba-401c-9fc4-ce38697c0219-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.548084 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.564247 4906 scope.go:117] "RemoveContainer" containerID="7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada" Feb 27 08:35:26 crc kubenswrapper[4906]: E0227 08:35:26.564719 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada\": container with ID starting with 7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada not found: ID does not exist" containerID="7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.564763 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada"} err="failed to get container status \"7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada\": rpc error: code = NotFound desc = could not find container 
\"7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada\": container with ID starting with 7a19219220e7fdec2cda8bc041646bb45e21b41b08bfe552565d7fdaf5877ada not found: ID does not exist" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.564793 4906 scope.go:117] "RemoveContainer" containerID="7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.578341 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-66d857645d-ctr64"] Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.589672 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-66d857645d-ctr64"] Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.591587 4906 scope.go:117] "RemoveContainer" containerID="7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e" Feb 27 08:35:26 crc kubenswrapper[4906]: E0227 08:35:26.592146 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e\": container with ID starting with 7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e not found: ID does not exist" containerID="7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.592190 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e"} err="failed to get container status \"7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e\": rpc error: code = NotFound desc = could not find container \"7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e\": container with ID starting with 7494130ab01fa3dc31e3ca01255c382b60445795a9086a07565b72060d54862e not found: ID does not exist" Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.595659 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd"] Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.600643 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7d7b5c67bb-pbbzd"] Feb 27 08:35:26 crc kubenswrapper[4906]: I0227 08:35:26.798618 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.061337 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.079791 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.199608 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.695952 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.736185 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.768096 
4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw"] Feb 27 08:35:27 crc kubenswrapper[4906]: E0227 08:35:27.777444 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.777507 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 27 08:35:27 crc kubenswrapper[4906]: E0227 08:35:27.777525 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" containerName="controller-manager" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.777534 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" containerName="controller-manager" Feb 27 08:35:27 crc kubenswrapper[4906]: E0227 08:35:27.777547 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0ac385-63ba-401c-9fc4-ce38697c0219" containerName="route-controller-manager" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.777554 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0ac385-63ba-401c-9fc4-ce38697c0219" containerName="route-controller-manager" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.777670 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.777683 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" containerName="controller-manager" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.777692 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0ac385-63ba-401c-9fc4-ce38697c0219" containerName="route-controller-manager" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.778188 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7bd5d7d548-m99r5"] Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.778409 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.778695 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.783302 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.783380 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.783985 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.784436 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.784735 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.785110 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.785531 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.785704 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.785866 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.786046 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.786735 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw"] Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.787053 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.787367 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.797009 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.797350 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bd5d7d548-m99r5"] Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.868802 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c11bf18e-013b-45b3-b017-2c8d80c49c05-serving-cert\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.868866 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-serving-cert\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.868926 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-proxy-ca-bundles\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.868945 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jn5r\" (UniqueName: \"kubernetes.io/projected/c11bf18e-013b-45b3-b017-2c8d80c49c05-kube-api-access-8jn5r\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.868971 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-config\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.868989 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-client-ca\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.869006 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-config\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.869063 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-client-ca\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.869091 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75cpv\" (UniqueName: \"kubernetes.io/projected/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-kube-api-access-75cpv\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970191 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/c11bf18e-013b-45b3-b017-2c8d80c49c05-serving-cert\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970256 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-serving-cert\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970281 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-proxy-ca-bundles\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970303 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jn5r\" (UniqueName: \"kubernetes.io/projected/c11bf18e-013b-45b3-b017-2c8d80c49c05-kube-api-access-8jn5r\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970333 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-config\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970351 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-config\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970368 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-client-ca\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970400 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-client-ca\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.970426 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75cpv\" (UniqueName: \"kubernetes.io/projected/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-kube-api-access-75cpv\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: 
\"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.972149 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-client-ca\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.972731 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-client-ca\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.972929 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-proxy-ca-bundles\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.972946 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-config\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.973574 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-config\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.977465 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-serving-cert\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.983799 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c11bf18e-013b-45b3-b017-2c8d80c49c05-serving-cert\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.991858 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jn5r\" (UniqueName: \"kubernetes.io/projected/c11bf18e-013b-45b3-b017-2c8d80c49c05-kube-api-access-8jn5r\") pod \"controller-manager-7bd5d7d548-m99r5\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:27 crc kubenswrapper[4906]: I0227 08:35:27.993812 4906 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-75cpv\" (UniqueName: \"kubernetes.io/projected/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-kube-api-access-75cpv\") pod \"route-controller-manager-676bbd764-p56kw\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:28 crc kubenswrapper[4906]: I0227 08:35:28.103611 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:28 crc kubenswrapper[4906]: I0227 08:35:28.111402 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:28 crc kubenswrapper[4906]: I0227 08:35:28.522026 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7bd5d7d548-m99r5"] Feb 27 08:35:28 crc kubenswrapper[4906]: W0227 08:35:28.559112 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc11bf18e_013b_45b3_b017_2c8d80c49c05.slice/crio-f7efa8c237d692d3b8b3fb1c4ab2e5ef1d7f1b4d17252b62fb1691b2e7fdf7c8 WatchSource:0}: Error finding container f7efa8c237d692d3b8b3fb1c4ab2e5ef1d7f1b4d17252b62fb1691b2e7fdf7c8: Status 404 returned error can't find the container with id f7efa8c237d692d3b8b3fb1c4ab2e5ef1d7f1b4d17252b62fb1691b2e7fdf7c8 Feb 27 08:35:28 crc kubenswrapper[4906]: I0227 08:35:28.562485 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c0ac385-63ba-401c-9fc4-ce38697c0219" path="/var/lib/kubelet/pods/1c0ac385-63ba-401c-9fc4-ce38697c0219/volumes" Feb 27 08:35:28 crc kubenswrapper[4906]: I0227 08:35:28.563271 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5" path="/var/lib/kubelet/pods/4f0f69e0-f55d-4d16-8eb6-8e03d4b5b5d5/volumes" Feb 27 08:35:28 crc kubenswrapper[4906]: I0227 08:35:28.591509 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw"] Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.074600 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.088163 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.548429 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.575955 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" event={"ID":"c11bf18e-013b-45b3-b017-2c8d80c49c05","Type":"ContainerStarted","Data":"657c2183d0c07bd06e0bae7b19f12494a09fe7546f2b3be75b55775a1077f7cb"} Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.576049 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.576080 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" 
event={"ID":"c11bf18e-013b-45b3-b017-2c8d80c49c05","Type":"ContainerStarted","Data":"f7efa8c237d692d3b8b3fb1c4ab2e5ef1d7f1b4d17252b62fb1691b2e7fdf7c8"} Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.579772 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" event={"ID":"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d","Type":"ContainerStarted","Data":"e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16"} Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.579831 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" event={"ID":"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d","Type":"ContainerStarted","Data":"d20879919e4497b907fa86943cb8f4341be84008a87feab6f5bdb3909f7b9aea"} Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.580026 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.580069 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.582980 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.586962 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.599620 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" podStartSLOduration=4.599599705 podStartE2EDuration="4.599599705s" podCreationTimestamp="2026-02-27 08:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:35:29.59803115 +0000 UTC m=+427.992432770" watchObservedRunningTime="2026-02-27 08:35:29.599599705 +0000 UTC m=+427.994001345" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.617742 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" podStartSLOduration=4.617717223 podStartE2EDuration="4.617717223s" podCreationTimestamp="2026-02-27 08:35:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:35:29.615232862 +0000 UTC m=+428.009634492" watchObservedRunningTime="2026-02-27 08:35:29.617717223 +0000 UTC m=+428.012118833" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.906806 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 27 08:35:29 crc kubenswrapper[4906]: I0227 08:35:29.985144 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 27 08:35:30 crc kubenswrapper[4906]: I0227 08:35:30.452062 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 27 08:35:30 crc kubenswrapper[4906]: I0227 08:35:30.671797 4906 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"signing-cabundle" Feb 27 08:35:30 crc kubenswrapper[4906]: I0227 08:35:30.925507 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 27 08:35:30 crc kubenswrapper[4906]: I0227 08:35:30.930324 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 27 08:35:30 crc kubenswrapper[4906]: I0227 08:35:30.981769 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 27 08:35:31 crc kubenswrapper[4906]: I0227 08:35:31.174884 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 27 08:35:31 crc kubenswrapper[4906]: I0227 08:35:31.403614 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 27 08:35:31 crc kubenswrapper[4906]: I0227 08:35:31.521249 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 27 08:35:33 crc kubenswrapper[4906]: I0227 08:35:33.330475 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 27 08:35:33 crc kubenswrapper[4906]: I0227 08:35:33.784573 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 27 08:35:34 crc kubenswrapper[4906]: I0227 08:35:34.373767 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 27 08:35:34 crc kubenswrapper[4906]: I0227 08:35:34.971567 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 27 08:35:35 crc kubenswrapper[4906]: I0227 08:35:35.195979 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 27 08:35:36 crc kubenswrapper[4906]: I0227 08:35:36.216334 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 27 08:35:36 crc kubenswrapper[4906]: I0227 08:35:36.790764 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 27 08:35:37 crc kubenswrapper[4906]: I0227 08:35:37.641701 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 27 08:35:37 crc kubenswrapper[4906]: I0227 08:35:37.881396 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 27 08:35:38 crc kubenswrapper[4906]: I0227 08:35:38.459242 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 27 08:35:38 crc kubenswrapper[4906]: I0227 08:35:38.721841 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 27 08:35:38 crc kubenswrapper[4906]: I0227 08:35:38.820511 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 27 08:35:39 crc kubenswrapper[4906]: I0227 08:35:39.700576 4906 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 27 08:35:39 crc kubenswrapper[4906]: I0227 08:35:39.917480 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 27 08:35:54 crc kubenswrapper[4906]: I0227 08:35:54.844926 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:35:54 crc kubenswrapper[4906]: I0227 08:35:54.845921 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:35:59 crc kubenswrapper[4906]: I0227 08:35:59.347932 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bd5d7d548-m99r5"] Feb 27 08:35:59 crc kubenswrapper[4906]: I0227 08:35:59.348733 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" podUID="c11bf18e-013b-45b3-b017-2c8d80c49c05" containerName="controller-manager" containerID="cri-o://657c2183d0c07bd06e0bae7b19f12494a09fe7546f2b3be75b55775a1077f7cb" gracePeriod=30 Feb 27 08:35:59 crc kubenswrapper[4906]: I0227 08:35:59.445491 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw"] Feb 27 08:35:59 crc kubenswrapper[4906]: I0227 08:35:59.445745 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" podUID="b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" containerName="route-controller-manager" containerID="cri-o://e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16" gracePeriod=30 Feb 27 08:35:59 crc kubenswrapper[4906]: I0227 08:35:59.829154 4906 generic.go:334] "Generic (PLEG): container finished" podID="c11bf18e-013b-45b3-b017-2c8d80c49c05" containerID="657c2183d0c07bd06e0bae7b19f12494a09fe7546f2b3be75b55775a1077f7cb" exitCode=0 Feb 27 08:35:59 crc kubenswrapper[4906]: I0227 08:35:59.829791 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" event={"ID":"c11bf18e-013b-45b3-b017-2c8d80c49c05","Type":"ContainerDied","Data":"657c2183d0c07bd06e0bae7b19f12494a09fe7546f2b3be75b55775a1077f7cb"} Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.133479 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536356-bbt2l"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.134248 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536356-bbt2l" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.137475 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.138290 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.138437 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.145567 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536356-bbt2l"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.231133 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcdkv\" (UniqueName: \"kubernetes.io/projected/d3135b40-4120-49cf-9649-52a416ce5313-kube-api-access-qcdkv\") pod \"auto-csr-approver-29536356-bbt2l\" (UID: \"d3135b40-4120-49cf-9649-52a416ce5313\") " pod="openshift-infra/auto-csr-approver-29536356-bbt2l" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.332166 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcdkv\" (UniqueName: \"kubernetes.io/projected/d3135b40-4120-49cf-9649-52a416ce5313-kube-api-access-qcdkv\") pod \"auto-csr-approver-29536356-bbt2l\" (UID: \"d3135b40-4120-49cf-9649-52a416ce5313\") " pod="openshift-infra/auto-csr-approver-29536356-bbt2l" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.367933 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcdkv\" (UniqueName: \"kubernetes.io/projected/d3135b40-4120-49cf-9649-52a416ce5313-kube-api-access-qcdkv\") pod \"auto-csr-approver-29536356-bbt2l\" (UID: \"d3135b40-4120-49cf-9649-52a416ce5313\") " pod="openshift-infra/auto-csr-approver-29536356-bbt2l" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.462635 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536356-bbt2l" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.636870 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.653333 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.669330 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v"] Feb 27 08:36:00 crc kubenswrapper[4906]: E0227 08:36:00.669600 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" containerName="route-controller-manager" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.669615 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" containerName="route-controller-manager" Feb 27 08:36:00 crc kubenswrapper[4906]: E0227 08:36:00.669624 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c11bf18e-013b-45b3-b017-2c8d80c49c05" containerName="controller-manager" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.669630 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="c11bf18e-013b-45b3-b017-2c8d80c49c05" containerName="controller-manager" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.669740 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" containerName="route-controller-manager" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.669759 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="c11bf18e-013b-45b3-b017-2c8d80c49c05" containerName="controller-manager" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.670197 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.682711 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.710311 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536356-bbt2l"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736540 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-proxy-ca-bundles\") pod \"c11bf18e-013b-45b3-b017-2c8d80c49c05\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736598 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-config\") pod \"c11bf18e-013b-45b3-b017-2c8d80c49c05\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736653 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-config\") pod \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736675 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c11bf18e-013b-45b3-b017-2c8d80c49c05-serving-cert\") pod \"c11bf18e-013b-45b3-b017-2c8d80c49c05\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " Feb 27 08:36:00 crc kubenswrapper[4906]: 
I0227 08:36:00.736714 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jn5r\" (UniqueName: \"kubernetes.io/projected/c11bf18e-013b-45b3-b017-2c8d80c49c05-kube-api-access-8jn5r\") pod \"c11bf18e-013b-45b3-b017-2c8d80c49c05\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736740 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-client-ca\") pod \"c11bf18e-013b-45b3-b017-2c8d80c49c05\" (UID: \"c11bf18e-013b-45b3-b017-2c8d80c49c05\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736772 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-client-ca\") pod \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736814 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75cpv\" (UniqueName: \"kubernetes.io/projected/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-kube-api-access-75cpv\") pod \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.736849 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-serving-cert\") pod \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\" (UID: \"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d\") " Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.737053 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6wd6\" (UniqueName: \"kubernetes.io/projected/cd3e8a03-312f-4c90-a8f2-7117488b8079-kube-api-access-d6wd6\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.737084 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3e8a03-312f-4c90-a8f2-7117488b8079-serving-cert\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.737112 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-config\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.737139 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-client-ca\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 
08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.739189 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-config" (OuterVolumeSpecName: "config") pod "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" (UID: "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.740370 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-client-ca" (OuterVolumeSpecName: "client-ca") pod "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" (UID: "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.740386 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-client-ca" (OuterVolumeSpecName: "client-ca") pod "c11bf18e-013b-45b3-b017-2c8d80c49c05" (UID: "c11bf18e-013b-45b3-b017-2c8d80c49c05"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.740427 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-config" (OuterVolumeSpecName: "config") pod "c11bf18e-013b-45b3-b017-2c8d80c49c05" (UID: "c11bf18e-013b-45b3-b017-2c8d80c49c05"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.741430 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "c11bf18e-013b-45b3-b017-2c8d80c49c05" (UID: "c11bf18e-013b-45b3-b017-2c8d80c49c05"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.742037 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c11bf18e-013b-45b3-b017-2c8d80c49c05-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c11bf18e-013b-45b3-b017-2c8d80c49c05" (UID: "c11bf18e-013b-45b3-b017-2c8d80c49c05"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.742120 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c11bf18e-013b-45b3-b017-2c8d80c49c05-kube-api-access-8jn5r" (OuterVolumeSpecName: "kube-api-access-8jn5r") pod "c11bf18e-013b-45b3-b017-2c8d80c49c05" (UID: "c11bf18e-013b-45b3-b017-2c8d80c49c05"). InnerVolumeSpecName "kube-api-access-8jn5r". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.743665 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-kube-api-access-75cpv" (OuterVolumeSpecName: "kube-api-access-75cpv") pod "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" (UID: "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d"). InnerVolumeSpecName "kube-api-access-75cpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.743779 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" (UID: "b4d3dc98-a521-4a77-b6c4-3ca51f818c6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.837866 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-config\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.837974 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-client-ca\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838114 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6wd6\" (UniqueName: \"kubernetes.io/projected/cd3e8a03-312f-4c90-a8f2-7117488b8079-kube-api-access-d6wd6\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838155 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3e8a03-312f-4c90-a8f2-7117488b8079-serving-cert\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838217 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838235 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838252 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838265 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c11bf18e-013b-45b3-b017-2c8d80c49c05-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838278 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jn5r\" (UniqueName: \"kubernetes.io/projected/c11bf18e-013b-45b3-b017-2c8d80c49c05-kube-api-access-8jn5r\") on node \"crc\" DevicePath \"\"" Feb 27 
08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838275 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536356-bbt2l" event={"ID":"d3135b40-4120-49cf-9649-52a416ce5313","Type":"ContainerStarted","Data":"1fa551c060cb87871accf06577d2043171af0dc6d12520d8b7b37755ceaaaadb"} Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838292 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c11bf18e-013b-45b3-b017-2c8d80c49c05-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838354 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838367 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75cpv\" (UniqueName: \"kubernetes.io/projected/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-kube-api-access-75cpv\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.838381 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.839332 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-client-ca\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.839372 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-config\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.841823 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" event={"ID":"c11bf18e-013b-45b3-b017-2c8d80c49c05","Type":"ContainerDied","Data":"f7efa8c237d692d3b8b3fb1c4ab2e5ef1d7f1b4d17252b62fb1691b2e7fdf7c8"} Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.841906 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7bd5d7d548-m99r5" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.841921 4906 scope.go:117] "RemoveContainer" containerID="657c2183d0c07bd06e0bae7b19f12494a09fe7546f2b3be75b55775a1077f7cb" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.842566 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3e8a03-312f-4c90-a8f2-7117488b8079-serving-cert\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.847346 4906 generic.go:334] "Generic (PLEG): container finished" podID="b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" containerID="e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16" exitCode=0 Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.847418 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" event={"ID":"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d","Type":"ContainerDied","Data":"e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16"} Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.847437 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.847471 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw" event={"ID":"b4d3dc98-a521-4a77-b6c4-3ca51f818c6d","Type":"ContainerDied","Data":"d20879919e4497b907fa86943cb8f4341be84008a87feab6f5bdb3909f7b9aea"} Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.862137 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6wd6\" (UniqueName: \"kubernetes.io/projected/cd3e8a03-312f-4c90-a8f2-7117488b8079-kube-api-access-d6wd6\") pod \"route-controller-manager-54558c8944-qpd4v\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.869598 4906 scope.go:117] "RemoveContainer" containerID="e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.885822 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7bd5d7d548-m99r5"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.887603 4906 scope.go:117] "RemoveContainer" containerID="e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16" Feb 27 08:36:00 crc kubenswrapper[4906]: E0227 08:36:00.888325 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16\": container with ID starting with e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16 not found: ID does not exist" containerID="e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.888412 4906 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16"} err="failed to get container status \"e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16\": rpc error: code = NotFound desc = could not find container \"e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16\": container with ID starting with e6177de189f290d654f201fb72a11b14ae69170f6aca8df10bc8d88423b15f16 not found: ID does not exist" Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.894545 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7bd5d7d548-m99r5"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.899745 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.905043 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-676bbd764-p56kw"] Feb 27 08:36:00 crc kubenswrapper[4906]: I0227 08:36:00.999038 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:01 crc kubenswrapper[4906]: I0227 08:36:01.443871 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v"] Feb 27 08:36:01 crc kubenswrapper[4906]: W0227 08:36:01.457978 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcd3e8a03_312f_4c90_a8f2_7117488b8079.slice/crio-444ea87020c768ca8d4f8acb24c1f82f051398b18fb424a31bbdfcea1de09d3f WatchSource:0}: Error finding container 444ea87020c768ca8d4f8acb24c1f82f051398b18fb424a31bbdfcea1de09d3f: Status 404 returned error can't find the container with id 444ea87020c768ca8d4f8acb24c1f82f051398b18fb424a31bbdfcea1de09d3f Feb 27 08:36:01 crc kubenswrapper[4906]: I0227 08:36:01.859934 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" event={"ID":"cd3e8a03-312f-4c90-a8f2-7117488b8079","Type":"ContainerStarted","Data":"444ea87020c768ca8d4f8acb24c1f82f051398b18fb424a31bbdfcea1de09d3f"} Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.560236 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4d3dc98-a521-4a77-b6c4-3ca51f818c6d" path="/var/lib/kubelet/pods/b4d3dc98-a521-4a77-b6c4-3ca51f818c6d/volumes" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.561324 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c11bf18e-013b-45b3-b017-2c8d80c49c05" path="/var/lib/kubelet/pods/c11bf18e-013b-45b3-b017-2c8d80c49c05/volumes" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.781604 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-dc6dc47d-c6jgl"] Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.782441 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.785930 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.786797 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.787107 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.787253 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.787325 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.787325 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.798816 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-dc6dc47d-c6jgl"] Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.803204 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.863998 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-proxy-ca-bundles\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.864082 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d08de847-3c0f-4a64-96ec-c0772b7620f0-serving-cert\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.864119 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-config\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.864294 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74zzj\" (UniqueName: \"kubernetes.io/projected/d08de847-3c0f-4a64-96ec-c0772b7620f0-kube-api-access-74zzj\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.864353 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-client-ca\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.868745 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" event={"ID":"cd3e8a03-312f-4c90-a8f2-7117488b8079","Type":"ContainerStarted","Data":"b7c0e7d55ec5e61fe4d07b9e48237d37283a26349cccc43eead0b92cac59a8df"} Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.869084 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.875906 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.893777 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" podStartSLOduration=3.893753437 podStartE2EDuration="3.893753437s" podCreationTimestamp="2026-02-27 08:35:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:02.891367405 +0000 UTC m=+461.285769025" watchObservedRunningTime="2026-02-27 08:36:02.893753437 +0000 UTC m=+461.288155047" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.966092 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-proxy-ca-bundles\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.966226 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d08de847-3c0f-4a64-96ec-c0772b7620f0-serving-cert\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.966266 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-config\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.966349 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74zzj\" (UniqueName: \"kubernetes.io/projected/d08de847-3c0f-4a64-96ec-c0772b7620f0-kube-api-access-74zzj\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.966438 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-client-ca\") pod 
\"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.968158 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-proxy-ca-bundles\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.969924 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-client-ca\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.975399 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d08de847-3c0f-4a64-96ec-c0772b7620f0-serving-cert\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.980001 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-config\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:02 crc kubenswrapper[4906]: I0227 08:36:02.988125 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74zzj\" (UniqueName: \"kubernetes.io/projected/d08de847-3c0f-4a64-96ec-c0772b7620f0-kube-api-access-74zzj\") pod \"controller-manager-dc6dc47d-c6jgl\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:03 crc kubenswrapper[4906]: I0227 08:36:03.104811 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:03 crc kubenswrapper[4906]: I0227 08:36:03.863913 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-dc6dc47d-c6jgl"] Feb 27 08:36:03 crc kubenswrapper[4906]: W0227 08:36:03.868812 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd08de847_3c0f_4a64_96ec_c0772b7620f0.slice/crio-e51b753e228faec37de0d096bbc81dc951c749214c35e17adfb738d0d099aa64 WatchSource:0}: Error finding container e51b753e228faec37de0d096bbc81dc951c749214c35e17adfb738d0d099aa64: Status 404 returned error can't find the container with id e51b753e228faec37de0d096bbc81dc951c749214c35e17adfb738d0d099aa64 Feb 27 08:36:03 crc kubenswrapper[4906]: I0227 08:36:03.876169 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" event={"ID":"d08de847-3c0f-4a64-96ec-c0772b7620f0","Type":"ContainerStarted","Data":"e51b753e228faec37de0d096bbc81dc951c749214c35e17adfb738d0d099aa64"} Feb 27 08:36:04 crc kubenswrapper[4906]: I0227 08:36:04.884021 4906 generic.go:334] "Generic (PLEG): container finished" podID="d3135b40-4120-49cf-9649-52a416ce5313" containerID="6cd34b41613ac3c336c40aa2ee3bfa66a88070b4c468ea66a4d5249aafd5cfb5" exitCode=0 Feb 27 08:36:04 crc kubenswrapper[4906]: I0227 08:36:04.884101 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536356-bbt2l" event={"ID":"d3135b40-4120-49cf-9649-52a416ce5313","Type":"ContainerDied","Data":"6cd34b41613ac3c336c40aa2ee3bfa66a88070b4c468ea66a4d5249aafd5cfb5"} Feb 27 08:36:04 crc kubenswrapper[4906]: I0227 08:36:04.887106 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" event={"ID":"d08de847-3c0f-4a64-96ec-c0772b7620f0","Type":"ContainerStarted","Data":"9c6bb13299a9893411c3553233a334473d82bf6f11ec3b5c36de80d287397c25"} Feb 27 08:36:04 crc kubenswrapper[4906]: I0227 08:36:04.887348 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:04 crc kubenswrapper[4906]: I0227 08:36:04.895323 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:04 crc kubenswrapper[4906]: I0227 08:36:04.935693 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" podStartSLOduration=5.935664038 podStartE2EDuration="5.935664038s" podCreationTimestamp="2026-02-27 08:35:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:04.93153414 +0000 UTC m=+463.325935750" watchObservedRunningTime="2026-02-27 08:36:04.935664038 +0000 UTC m=+463.330065648" Feb 27 08:36:05 crc kubenswrapper[4906]: I0227 08:36:05.769920 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-dc6dc47d-c6jgl"] Feb 27 08:36:05 crc kubenswrapper[4906]: I0227 08:36:05.786481 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v"] Feb 27 08:36:05 crc kubenswrapper[4906]: I0227 08:36:05.894556 4906 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" podUID="cd3e8a03-312f-4c90-a8f2-7117488b8079" containerName="route-controller-manager" containerID="cri-o://b7c0e7d55ec5e61fe4d07b9e48237d37283a26349cccc43eead0b92cac59a8df" gracePeriod=30 Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.177800 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536356-bbt2l" Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.306225 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lz5rn"] Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.310061 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qcdkv\" (UniqueName: \"kubernetes.io/projected/d3135b40-4120-49cf-9649-52a416ce5313-kube-api-access-qcdkv\") pod \"d3135b40-4120-49cf-9649-52a416ce5313\" (UID: \"d3135b40-4120-49cf-9649-52a416ce5313\") " Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.316742 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3135b40-4120-49cf-9649-52a416ce5313-kube-api-access-qcdkv" (OuterVolumeSpecName: "kube-api-access-qcdkv") pod "d3135b40-4120-49cf-9649-52a416ce5313" (UID: "d3135b40-4120-49cf-9649-52a416ce5313"). InnerVolumeSpecName "kube-api-access-qcdkv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.412661 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qcdkv\" (UniqueName: \"kubernetes.io/projected/d3135b40-4120-49cf-9649-52a416ce5313-kube-api-access-qcdkv\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.908916 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536356-bbt2l" event={"ID":"d3135b40-4120-49cf-9649-52a416ce5313","Type":"ContainerDied","Data":"1fa551c060cb87871accf06577d2043171af0dc6d12520d8b7b37755ceaaaadb"} Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.908969 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536356-bbt2l" Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.908991 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1fa551c060cb87871accf06577d2043171af0dc6d12520d8b7b37755ceaaaadb" Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.911107 4906 generic.go:334] "Generic (PLEG): container finished" podID="cd3e8a03-312f-4c90-a8f2-7117488b8079" containerID="b7c0e7d55ec5e61fe4d07b9e48237d37283a26349cccc43eead0b92cac59a8df" exitCode=0 Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.911592 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" event={"ID":"cd3e8a03-312f-4c90-a8f2-7117488b8079","Type":"ContainerDied","Data":"b7c0e7d55ec5e61fe4d07b9e48237d37283a26349cccc43eead0b92cac59a8df"} Feb 27 08:36:06 crc kubenswrapper[4906]: I0227 08:36:06.911294 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" podUID="d08de847-3c0f-4a64-96ec-c0772b7620f0" containerName="controller-manager" containerID="cri-o://9c6bb13299a9893411c3553233a334473d82bf6f11ec3b5c36de80d287397c25" gracePeriod=30 Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.115448 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.223352 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-config\") pod \"cd3e8a03-312f-4c90-a8f2-7117488b8079\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.223908 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3e8a03-312f-4c90-a8f2-7117488b8079-serving-cert\") pod \"cd3e8a03-312f-4c90-a8f2-7117488b8079\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.224012 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6wd6\" (UniqueName: \"kubernetes.io/projected/cd3e8a03-312f-4c90-a8f2-7117488b8079-kube-api-access-d6wd6\") pod \"cd3e8a03-312f-4c90-a8f2-7117488b8079\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.224061 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-client-ca\") pod \"cd3e8a03-312f-4c90-a8f2-7117488b8079\" (UID: \"cd3e8a03-312f-4c90-a8f2-7117488b8079\") " Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.225063 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-client-ca" (OuterVolumeSpecName: "client-ca") pod "cd3e8a03-312f-4c90-a8f2-7117488b8079" (UID: "cd3e8a03-312f-4c90-a8f2-7117488b8079"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.226967 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-config" (OuterVolumeSpecName: "config") pod "cd3e8a03-312f-4c90-a8f2-7117488b8079" (UID: "cd3e8a03-312f-4c90-a8f2-7117488b8079"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.229748 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd3e8a03-312f-4c90-a8f2-7117488b8079-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cd3e8a03-312f-4c90-a8f2-7117488b8079" (UID: "cd3e8a03-312f-4c90-a8f2-7117488b8079"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.230310 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd3e8a03-312f-4c90-a8f2-7117488b8079-kube-api-access-d6wd6" (OuterVolumeSpecName: "kube-api-access-d6wd6") pod "cd3e8a03-312f-4c90-a8f2-7117488b8079" (UID: "cd3e8a03-312f-4c90-a8f2-7117488b8079"). InnerVolumeSpecName "kube-api-access-d6wd6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.248825 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536350-85r2j"] Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.252454 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536350-85r2j"] Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.325136 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.325177 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd3e8a03-312f-4c90-a8f2-7117488b8079-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.325189 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6wd6\" (UniqueName: \"kubernetes.io/projected/cd3e8a03-312f-4c90-a8f2-7117488b8079-kube-api-access-d6wd6\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.325200 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cd3e8a03-312f-4c90-a8f2-7117488b8079-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.787281 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx"] Feb 27 08:36:08 crc kubenswrapper[4906]: E0227 08:36:07.787590 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3135b40-4120-49cf-9649-52a416ce5313" containerName="oc" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.787607 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3135b40-4120-49cf-9649-52a416ce5313" containerName="oc" Feb 27 08:36:08 crc kubenswrapper[4906]: E0227 08:36:07.787621 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd3e8a03-312f-4c90-a8f2-7117488b8079" containerName="route-controller-manager" 
Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.787631 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd3e8a03-312f-4c90-a8f2-7117488b8079" containerName="route-controller-manager" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.787760 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3135b40-4120-49cf-9649-52a416ce5313" containerName="oc" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.787780 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd3e8a03-312f-4c90-a8f2-7117488b8079" containerName="route-controller-manager" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.788343 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.804334 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx"] Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.831642 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5308e4bc-2457-46dd-899d-403fc81a4fee-serving-cert\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.831727 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-config\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.831758 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46555\" (UniqueName: \"kubernetes.io/projected/5308e4bc-2457-46dd-899d-403fc81a4fee-kube-api-access-46555\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.831906 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-client-ca\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.920498 4906 generic.go:334] "Generic (PLEG): container finished" podID="d08de847-3c0f-4a64-96ec-c0772b7620f0" containerID="9c6bb13299a9893411c3553233a334473d82bf6f11ec3b5c36de80d287397c25" exitCode=0 Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.920581 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" event={"ID":"d08de847-3c0f-4a64-96ec-c0772b7620f0","Type":"ContainerDied","Data":"9c6bb13299a9893411c3553233a334473d82bf6f11ec3b5c36de80d287397c25"} Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.922385 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" event={"ID":"cd3e8a03-312f-4c90-a8f2-7117488b8079","Type":"ContainerDied","Data":"444ea87020c768ca8d4f8acb24c1f82f051398b18fb424a31bbdfcea1de09d3f"} Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.922449 4906 scope.go:117] "RemoveContainer" containerID="b7c0e7d55ec5e61fe4d07b9e48237d37283a26349cccc43eead0b92cac59a8df" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.922452 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.933178 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46555\" (UniqueName: \"kubernetes.io/projected/5308e4bc-2457-46dd-899d-403fc81a4fee-kube-api-access-46555\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.933252 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-client-ca\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.933299 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5308e4bc-2457-46dd-899d-403fc81a4fee-serving-cert\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.933362 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-config\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.934941 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-client-ca\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.935239 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-config\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.938743 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5308e4bc-2457-46dd-899d-403fc81a4fee-serving-cert\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: 
\"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.955549 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v"] Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.957197 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46555\" (UniqueName: \"kubernetes.io/projected/5308e4bc-2457-46dd-899d-403fc81a4fee-kube-api-access-46555\") pod \"route-controller-manager-5489c94678-nd5fx\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:07.959324 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-54558c8944-qpd4v"] Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:08.124925 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:08.561191 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd3e8a03-312f-4c90-a8f2-7117488b8079" path="/var/lib/kubelet/pods/cd3e8a03-312f-4c90-a8f2-7117488b8079/volumes" Feb 27 08:36:08 crc kubenswrapper[4906]: I0227 08:36:08.562209 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f743212c-ed63-408e-8063-ed04c8a7a1a9" path="/var/lib/kubelet/pods/f743212c-ed63-408e-8063-ed04c8a7a1a9/volumes" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.157550 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx"] Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.299523 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.349681 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-proxy-ca-bundles\") pod \"d08de847-3c0f-4a64-96ec-c0772b7620f0\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.349733 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-config\") pod \"d08de847-3c0f-4a64-96ec-c0772b7620f0\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.349753 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-74zzj\" (UniqueName: \"kubernetes.io/projected/d08de847-3c0f-4a64-96ec-c0772b7620f0-kube-api-access-74zzj\") pod \"d08de847-3c0f-4a64-96ec-c0772b7620f0\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.349801 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d08de847-3c0f-4a64-96ec-c0772b7620f0-serving-cert\") pod \"d08de847-3c0f-4a64-96ec-c0772b7620f0\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.349911 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-client-ca\") pod \"d08de847-3c0f-4a64-96ec-c0772b7620f0\" (UID: \"d08de847-3c0f-4a64-96ec-c0772b7620f0\") " Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.351656 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d08de847-3c0f-4a64-96ec-c0772b7620f0" (UID: "d08de847-3c0f-4a64-96ec-c0772b7620f0"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.351954 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-config" (OuterVolumeSpecName: "config") pod "d08de847-3c0f-4a64-96ec-c0772b7620f0" (UID: "d08de847-3c0f-4a64-96ec-c0772b7620f0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.352319 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-client-ca" (OuterVolumeSpecName: "client-ca") pod "d08de847-3c0f-4a64-96ec-c0772b7620f0" (UID: "d08de847-3c0f-4a64-96ec-c0772b7620f0"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.359167 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d08de847-3c0f-4a64-96ec-c0772b7620f0-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d08de847-3c0f-4a64-96ec-c0772b7620f0" (UID: "d08de847-3c0f-4a64-96ec-c0772b7620f0"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.359167 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d08de847-3c0f-4a64-96ec-c0772b7620f0-kube-api-access-74zzj" (OuterVolumeSpecName: "kube-api-access-74zzj") pod "d08de847-3c0f-4a64-96ec-c0772b7620f0" (UID: "d08de847-3c0f-4a64-96ec-c0772b7620f0"). InnerVolumeSpecName "kube-api-access-74zzj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.450910 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.450963 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.450986 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d08de847-3c0f-4a64-96ec-c0772b7620f0-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.451006 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-74zzj\" (UniqueName: \"kubernetes.io/projected/d08de847-3c0f-4a64-96ec-c0772b7620f0-kube-api-access-74zzj\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.451025 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d08de847-3c0f-4a64-96ec-c0772b7620f0-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.787428 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9"] Feb 27 08:36:09 crc kubenswrapper[4906]: E0227 08:36:09.787669 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d08de847-3c0f-4a64-96ec-c0772b7620f0" containerName="controller-manager" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.787682 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d08de847-3c0f-4a64-96ec-c0772b7620f0" containerName="controller-manager" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.787783 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d08de847-3c0f-4a64-96ec-c0772b7620f0" containerName="controller-manager" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.788249 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.796727 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9"] Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.855551 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115f142e-6788-4c2d-a9ca-98e832c80810-serving-cert\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.855606 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-client-ca\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.855669 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8r4tm\" (UniqueName: \"kubernetes.io/projected/115f142e-6788-4c2d-a9ca-98e832c80810-kube-api-access-8r4tm\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.855828 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-config\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.855949 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-proxy-ca-bundles\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.934953 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" event={"ID":"5308e4bc-2457-46dd-899d-403fc81a4fee","Type":"ContainerStarted","Data":"43f7fab25b17a7ae9871ab9211255ca39287c413ded39cb82db855b761aa227c"} Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.935015 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" event={"ID":"5308e4bc-2457-46dd-899d-403fc81a4fee","Type":"ContainerStarted","Data":"8c49334e7446cf59f358fcf6e01e28a473628971df531b8f608fbbd6d75b8931"} Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.935216 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.936465 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" event={"ID":"d08de847-3c0f-4a64-96ec-c0772b7620f0","Type":"ContainerDied","Data":"e51b753e228faec37de0d096bbc81dc951c749214c35e17adfb738d0d099aa64"} Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.936501 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-dc6dc47d-c6jgl" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.936541 4906 scope.go:117] "RemoveContainer" containerID="9c6bb13299a9893411c3553233a334473d82bf6f11ec3b5c36de80d287397c25" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.955708 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" podStartSLOduration=4.955684117 podStartE2EDuration="4.955684117s" podCreationTimestamp="2026-02-27 08:36:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:09.952965446 +0000 UTC m=+468.347367066" watchObservedRunningTime="2026-02-27 08:36:09.955684117 +0000 UTC m=+468.350085737" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.957276 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-config\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.957348 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-proxy-ca-bundles\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.957392 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115f142e-6788-4c2d-a9ca-98e832c80810-serving-cert\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.957421 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-client-ca\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.957467 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8r4tm\" (UniqueName: \"kubernetes.io/projected/115f142e-6788-4c2d-a9ca-98e832c80810-kube-api-access-8r4tm\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.960242 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-client-ca\") pod 
\"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.961209 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-proxy-ca-bundles\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.963655 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-config\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.964150 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115f142e-6788-4c2d-a9ca-98e832c80810-serving-cert\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.976349 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-dc6dc47d-c6jgl"] Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.980452 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8r4tm\" (UniqueName: \"kubernetes.io/projected/115f142e-6788-4c2d-a9ca-98e832c80810-kube-api-access-8r4tm\") pod \"controller-manager-6bdd6f5f98-86nc9\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:09 crc kubenswrapper[4906]: I0227 08:36:09.986650 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-dc6dc47d-c6jgl"] Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.018779 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.106923 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.355423 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9"] Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.561687 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d08de847-3c0f-4a64-96ec-c0772b7620f0" path="/var/lib/kubelet/pods/d08de847-3c0f-4a64-96ec-c0772b7620f0/volumes" Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.944906 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" event={"ID":"115f142e-6788-4c2d-a9ca-98e832c80810","Type":"ContainerStarted","Data":"c0e7e6ada00d294f6b0cb2ed78986e3738a7755a3eeedfa51260fcedfc2f0ba4"} Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.944957 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" event={"ID":"115f142e-6788-4c2d-a9ca-98e832c80810","Type":"ContainerStarted","Data":"bc2aa7b5a37d380558480588f101ec54e903b0bc9b6e4645e23fe0a3c9c4f6d3"} Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.945301 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.955516 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:10 crc kubenswrapper[4906]: I0227 08:36:10.975695 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" podStartSLOduration=5.975669933 podStartE2EDuration="5.975669933s" podCreationTimestamp="2026-02-27 08:36:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:10.975232472 +0000 UTC m=+469.369634082" watchObservedRunningTime="2026-02-27 08:36:10.975669933 +0000 UTC m=+469.370071543" Feb 27 08:36:24 crc kubenswrapper[4906]: I0227 08:36:24.844514 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:36:24 crc kubenswrapper[4906]: I0227 08:36:24.845520 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:36:25 crc kubenswrapper[4906]: I0227 08:36:25.783515 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9"] Feb 27 08:36:25 crc kubenswrapper[4906]: I0227 08:36:25.783799 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" podUID="115f142e-6788-4c2d-a9ca-98e832c80810" containerName="controller-manager" containerID="cri-o://c0e7e6ada00d294f6b0cb2ed78986e3738a7755a3eeedfa51260fcedfc2f0ba4" gracePeriod=30 
Feb 27 08:36:25 crc kubenswrapper[4906]: I0227 08:36:25.804109 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx"] Feb 27 08:36:25 crc kubenswrapper[4906]: I0227 08:36:25.804358 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" podUID="5308e4bc-2457-46dd-899d-403fc81a4fee" containerName="route-controller-manager" containerID="cri-o://43f7fab25b17a7ae9871ab9211255ca39287c413ded39cb82db855b761aa227c" gracePeriod=30 Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.039947 4906 generic.go:334] "Generic (PLEG): container finished" podID="5308e4bc-2457-46dd-899d-403fc81a4fee" containerID="43f7fab25b17a7ae9871ab9211255ca39287c413ded39cb82db855b761aa227c" exitCode=0 Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.040038 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" event={"ID":"5308e4bc-2457-46dd-899d-403fc81a4fee","Type":"ContainerDied","Data":"43f7fab25b17a7ae9871ab9211255ca39287c413ded39cb82db855b761aa227c"} Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.042012 4906 generic.go:334] "Generic (PLEG): container finished" podID="115f142e-6788-4c2d-a9ca-98e832c80810" containerID="c0e7e6ada00d294f6b0cb2ed78986e3738a7755a3eeedfa51260fcedfc2f0ba4" exitCode=0 Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.042065 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" event={"ID":"115f142e-6788-4c2d-a9ca-98e832c80810","Type":"ContainerDied","Data":"c0e7e6ada00d294f6b0cb2ed78986e3738a7755a3eeedfa51260fcedfc2f0ba4"} Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.314173 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.382255 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46555\" (UniqueName: \"kubernetes.io/projected/5308e4bc-2457-46dd-899d-403fc81a4fee-kube-api-access-46555\") pod \"5308e4bc-2457-46dd-899d-403fc81a4fee\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.382601 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5308e4bc-2457-46dd-899d-403fc81a4fee-serving-cert\") pod \"5308e4bc-2457-46dd-899d-403fc81a4fee\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.382736 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-client-ca\") pod \"5308e4bc-2457-46dd-899d-403fc81a4fee\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.382814 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-config\") pod \"5308e4bc-2457-46dd-899d-403fc81a4fee\" (UID: \"5308e4bc-2457-46dd-899d-403fc81a4fee\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.383798 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-client-ca" (OuterVolumeSpecName: "client-ca") pod "5308e4bc-2457-46dd-899d-403fc81a4fee" (UID: "5308e4bc-2457-46dd-899d-403fc81a4fee"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.383846 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-config" (OuterVolumeSpecName: "config") pod "5308e4bc-2457-46dd-899d-403fc81a4fee" (UID: "5308e4bc-2457-46dd-899d-403fc81a4fee"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.390532 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5308e4bc-2457-46dd-899d-403fc81a4fee-kube-api-access-46555" (OuterVolumeSpecName: "kube-api-access-46555") pod "5308e4bc-2457-46dd-899d-403fc81a4fee" (UID: "5308e4bc-2457-46dd-899d-403fc81a4fee"). InnerVolumeSpecName "kube-api-access-46555". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.390515 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5308e4bc-2457-46dd-899d-403fc81a4fee-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5308e4bc-2457-46dd-899d-403fc81a4fee" (UID: "5308e4bc-2457-46dd-899d-403fc81a4fee"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.447408 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.483758 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115f142e-6788-4c2d-a9ca-98e832c80810-serving-cert\") pod \"115f142e-6788-4c2d-a9ca-98e832c80810\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.483810 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-config\") pod \"115f142e-6788-4c2d-a9ca-98e832c80810\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.483893 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-client-ca\") pod \"115f142e-6788-4c2d-a9ca-98e832c80810\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.483915 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-proxy-ca-bundles\") pod \"115f142e-6788-4c2d-a9ca-98e832c80810\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.483990 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8r4tm\" (UniqueName: \"kubernetes.io/projected/115f142e-6788-4c2d-a9ca-98e832c80810-kube-api-access-8r4tm\") pod \"115f142e-6788-4c2d-a9ca-98e832c80810\" (UID: \"115f142e-6788-4c2d-a9ca-98e832c80810\") " Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.484207 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5308e4bc-2457-46dd-899d-403fc81a4fee-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.484231 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.484249 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5308e4bc-2457-46dd-899d-403fc81a4fee-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.484262 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46555\" (UniqueName: \"kubernetes.io/projected/5308e4bc-2457-46dd-899d-403fc81a4fee-kube-api-access-46555\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.484820 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-client-ca" (OuterVolumeSpecName: "client-ca") pod "115f142e-6788-4c2d-a9ca-98e832c80810" (UID: "115f142e-6788-4c2d-a9ca-98e832c80810"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.485131 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-config" (OuterVolumeSpecName: "config") pod "115f142e-6788-4c2d-a9ca-98e832c80810" (UID: "115f142e-6788-4c2d-a9ca-98e832c80810"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.485122 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "115f142e-6788-4c2d-a9ca-98e832c80810" (UID: "115f142e-6788-4c2d-a9ca-98e832c80810"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.488015 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/115f142e-6788-4c2d-a9ca-98e832c80810-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "115f142e-6788-4c2d-a9ca-98e832c80810" (UID: "115f142e-6788-4c2d-a9ca-98e832c80810"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.489118 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/115f142e-6788-4c2d-a9ca-98e832c80810-kube-api-access-8r4tm" (OuterVolumeSpecName: "kube-api-access-8r4tm") pod "115f142e-6788-4c2d-a9ca-98e832c80810" (UID: "115f142e-6788-4c2d-a9ca-98e832c80810"). InnerVolumeSpecName "kube-api-access-8r4tm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.584986 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/115f142e-6788-4c2d-a9ca-98e832c80810-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.585016 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.585025 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.585033 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/115f142e-6788-4c2d-a9ca-98e832c80810-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:26 crc kubenswrapper[4906]: I0227 08:36:26.585047 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8r4tm\" (UniqueName: \"kubernetes.io/projected/115f142e-6788-4c2d-a9ca-98e832c80810-kube-api-access-8r4tm\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.052701 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" event={"ID":"115f142e-6788-4c2d-a9ca-98e832c80810","Type":"ContainerDied","Data":"bc2aa7b5a37d380558480588f101ec54e903b0bc9b6e4645e23fe0a3c9c4f6d3"} Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.052745 4906 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.053315 4906 scope.go:117] "RemoveContainer" containerID="c0e7e6ada00d294f6b0cb2ed78986e3738a7755a3eeedfa51260fcedfc2f0ba4" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.055336 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" event={"ID":"5308e4bc-2457-46dd-899d-403fc81a4fee","Type":"ContainerDied","Data":"8c49334e7446cf59f358fcf6e01e28a473628971df531b8f608fbbd6d75b8931"} Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.055428 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.077086 4906 scope.go:117] "RemoveContainer" containerID="43f7fab25b17a7ae9871ab9211255ca39287c413ded39cb82db855b761aa227c" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.082442 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9"] Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.093093 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6bdd6f5f98-86nc9"] Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.098848 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx"] Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.106135 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5489c94678-nd5fx"] Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.810513 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf"] Feb 27 08:36:27 crc kubenswrapper[4906]: E0227 08:36:27.811533 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5308e4bc-2457-46dd-899d-403fc81a4fee" containerName="route-controller-manager" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.811608 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="5308e4bc-2457-46dd-899d-403fc81a4fee" containerName="route-controller-manager" Feb 27 08:36:27 crc kubenswrapper[4906]: E0227 08:36:27.812022 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="115f142e-6788-4c2d-a9ca-98e832c80810" containerName="controller-manager" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.812124 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="115f142e-6788-4c2d-a9ca-98e832c80810" containerName="controller-manager" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.812430 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="115f142e-6788-4c2d-a9ca-98e832c80810" containerName="controller-manager" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.812499 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="5308e4bc-2457-46dd-899d-403fc81a4fee" containerName="route-controller-manager" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.813493 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.814312 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6986774b4f-4576f"] Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.815199 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.819730 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.820790 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.821663 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.829968 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.830937 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.832586 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.834315 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.844051 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.844671 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.845092 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.845439 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.848174 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.861985 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf"] Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.866402 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.869916 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6986774b4f-4576f"] Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.904628 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-client-ca\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.904926 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93c5dea3-7d27-439b-8219-4178cae7536b-serving-cert\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.905006 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d91175f3-23aa-43ee-b193-3661c803b68c-serving-cert\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.905077 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-client-ca\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.905547 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bl7kk\" (UniqueName: \"kubernetes.io/projected/d91175f3-23aa-43ee-b193-3661c803b68c-kube-api-access-bl7kk\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.905670 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-proxy-ca-bundles\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.905761 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-config\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.906329 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-config\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:27 crc kubenswrapper[4906]: I0227 08:36:27.906418 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzkxv\" (UniqueName: 
\"kubernetes.io/projected/93c5dea3-7d27-439b-8219-4178cae7536b-kube-api-access-mzkxv\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.007848 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-config\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.007982 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzkxv\" (UniqueName: \"kubernetes.io/projected/93c5dea3-7d27-439b-8219-4178cae7536b-kube-api-access-mzkxv\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.008035 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-client-ca\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.008056 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93c5dea3-7d27-439b-8219-4178cae7536b-serving-cert\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.008078 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d91175f3-23aa-43ee-b193-3661c803b68c-serving-cert\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.008102 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-client-ca\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.008120 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bl7kk\" (UniqueName: \"kubernetes.io/projected/d91175f3-23aa-43ee-b193-3661c803b68c-kube-api-access-bl7kk\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.008146 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-proxy-ca-bundles\") pod 
\"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.008167 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-config\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.009652 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-client-ca\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.009712 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-config\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.009772 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-client-ca\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.010854 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-proxy-ca-bundles\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.011508 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-config\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.014852 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d91175f3-23aa-43ee-b193-3661c803b68c-serving-cert\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.015951 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93c5dea3-7d27-439b-8219-4178cae7536b-serving-cert\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.028355 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzkxv\" (UniqueName: \"kubernetes.io/projected/93c5dea3-7d27-439b-8219-4178cae7536b-kube-api-access-mzkxv\") pod \"route-controller-manager-5fc8cf4bff-4bkhf\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.029369 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bl7kk\" (UniqueName: \"kubernetes.io/projected/d91175f3-23aa-43ee-b193-3661c803b68c-kube-api-access-bl7kk\") pod \"controller-manager-6986774b4f-4576f\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.170795 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.186141 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.563419 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="115f142e-6788-4c2d-a9ca-98e832c80810" path="/var/lib/kubelet/pods/115f142e-6788-4c2d-a9ca-98e832c80810/volumes" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.564417 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5308e4bc-2457-46dd-899d-403fc81a4fee" path="/var/lib/kubelet/pods/5308e4bc-2457-46dd-899d-403fc81a4fee/volumes" Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.624638 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf"] Feb 27 08:36:28 crc kubenswrapper[4906]: W0227 08:36:28.627753 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod93c5dea3_7d27_439b_8219_4178cae7536b.slice/crio-0372a1ed7b222634d034dfa2e35ee068a0754c7d0c52e5878bdd43ad81f3c2fa WatchSource:0}: Error finding container 0372a1ed7b222634d034dfa2e35ee068a0754c7d0c52e5878bdd43ad81f3c2fa: Status 404 returned error can't find the container with id 0372a1ed7b222634d034dfa2e35ee068a0754c7d0c52e5878bdd43ad81f3c2fa Feb 27 08:36:28 crc kubenswrapper[4906]: I0227 08:36:28.668509 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6986774b4f-4576f"] Feb 27 08:36:28 crc kubenswrapper[4906]: W0227 08:36:28.674918 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd91175f3_23aa_43ee_b193_3661c803b68c.slice/crio-4d424462f4d0a1196118af12db33ba2e9b6ae60fb2873c9a689d60ffa58cdbd6 WatchSource:0}: Error finding container 4d424462f4d0a1196118af12db33ba2e9b6ae60fb2873c9a689d60ffa58cdbd6: Status 404 returned error can't find the container with id 4d424462f4d0a1196118af12db33ba2e9b6ae60fb2873c9a689d60ffa58cdbd6 Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.071928 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" 
event={"ID":"93c5dea3-7d27-439b-8219-4178cae7536b","Type":"ContainerStarted","Data":"3662fd3ce3c87e8e15c6be7caffc161aa66ff5e19e49679e8a1d49b690b01972"} Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.072338 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" event={"ID":"93c5dea3-7d27-439b-8219-4178cae7536b","Type":"ContainerStarted","Data":"0372a1ed7b222634d034dfa2e35ee068a0754c7d0c52e5878bdd43ad81f3c2fa"} Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.072355 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.074206 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" event={"ID":"d91175f3-23aa-43ee-b193-3661c803b68c","Type":"ContainerStarted","Data":"14be28afb066c56bb9ce969940fb09fb46862e1e985a58df3034960d4b7a56aa"} Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.074255 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" event={"ID":"d91175f3-23aa-43ee-b193-3661c803b68c","Type":"ContainerStarted","Data":"4d424462f4d0a1196118af12db33ba2e9b6ae60fb2873c9a689d60ffa58cdbd6"} Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.074471 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.077207 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.079052 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.092342 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" podStartSLOduration=4.092322145 podStartE2EDuration="4.092322145s" podCreationTimestamp="2026-02-27 08:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:29.089315226 +0000 UTC m=+487.483716836" watchObservedRunningTime="2026-02-27 08:36:29.092322145 +0000 UTC m=+487.486723745" Feb 27 08:36:29 crc kubenswrapper[4906]: I0227 08:36:29.109289 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" podStartSLOduration=4.109267558 podStartE2EDuration="4.109267558s" podCreationTimestamp="2026-02-27 08:36:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:29.104622946 +0000 UTC m=+487.499024556" watchObservedRunningTime="2026-02-27 08:36:29.109267558 +0000 UTC m=+487.503669168" Feb 27 08:36:31 crc kubenswrapper[4906]: I0227 08:36:31.339013 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" podUID="0f8b1512-2590-418e-8504-70ef3c1567b0" containerName="oauth-openshift" 
containerID="cri-o://6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27" gracePeriod=15 Feb 27 08:36:31 crc kubenswrapper[4906]: I0227 08:36:31.942414 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071139 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hn86w\" (UniqueName: \"kubernetes.io/projected/0f8b1512-2590-418e-8504-70ef3c1567b0-kube-api-access-hn86w\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071195 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-service-ca\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071246 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-idp-0-file-data\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071295 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-session\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071349 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-cliconfig\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071368 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-ocp-branding-template\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071394 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-dir\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071416 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-provider-selection\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071436 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" 
(UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-router-certs\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071472 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-serving-cert\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071493 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-login\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071513 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-policies\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071545 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-trusted-ca-bundle\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.071576 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-error\") pod \"0f8b1512-2590-418e-8504-70ef3c1567b0\" (UID: \"0f8b1512-2590-418e-8504-70ef3c1567b0\") " Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.073024 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.073168 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.073421 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.073790 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.074175 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.079933 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.080182 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f8b1512-2590-418e-8504-70ef3c1567b0-kube-api-access-hn86w" (OuterVolumeSpecName: "kube-api-access-hn86w") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "kube-api-access-hn86w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.080199 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.080500 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.080719 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.081253 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.084282 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.084659 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.085681 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "0f8b1512-2590-418e-8504-70ef3c1567b0" (UID: "0f8b1512-2590-418e-8504-70ef3c1567b0"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.098336 4906 generic.go:334] "Generic (PLEG): container finished" podID="0f8b1512-2590-418e-8504-70ef3c1567b0" containerID="6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27" exitCode=0 Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.098414 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" event={"ID":"0f8b1512-2590-418e-8504-70ef3c1567b0","Type":"ContainerDied","Data":"6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27"} Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.098487 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" event={"ID":"0f8b1512-2590-418e-8504-70ef3c1567b0","Type":"ContainerDied","Data":"fb57066c6a21ca243fe0a91e97560b10bf46391dd734f025e4cfe0615a8311a1"} Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.098514 4906 scope.go:117] "RemoveContainer" containerID="6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.098910 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-lz5rn" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.139102 4906 scope.go:117] "RemoveContainer" containerID="6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27" Feb 27 08:36:32 crc kubenswrapper[4906]: E0227 08:36:32.139543 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27\": container with ID starting with 6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27 not found: ID does not exist" containerID="6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.139579 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27"} err="failed to get container status \"6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27\": rpc error: code = NotFound desc = could not find container \"6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27\": container with ID starting with 6e506685611f14f68da96142cdd67c93409b4c3194638b4606b5cf9797c51d27 not found: ID does not exist" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.148843 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lz5rn"] Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.153253 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-lz5rn"] Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173338 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173385 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173405 4906 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173418 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173430 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173440 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173453 4906 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173463 4906 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173475 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173486 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173499 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hn86w\" (UniqueName: \"kubernetes.io/projected/0f8b1512-2590-418e-8504-70ef3c1567b0-kube-api-access-hn86w\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173508 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173519 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.173528 4906 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/0f8b1512-2590-418e-8504-70ef3c1567b0-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:32 crc kubenswrapper[4906]: I0227 08:36:32.558721 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f8b1512-2590-418e-8504-70ef3c1567b0" path="/var/lib/kubelet/pods/0f8b1512-2590-418e-8504-70ef3c1567b0/volumes" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.280644 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zb9z6"] Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.281473 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zb9z6" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="registry-server" containerID="cri-o://016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c" gracePeriod=2 Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.809712 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq"] Feb 27 08:36:34 crc kubenswrapper[4906]: E0227 08:36:34.810062 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f8b1512-2590-418e-8504-70ef3c1567b0" containerName="oauth-openshift" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.810082 4906 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="0f8b1512-2590-418e-8504-70ef3c1567b0" containerName="oauth-openshift" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.810186 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f8b1512-2590-418e-8504-70ef3c1567b0" containerName="oauth-openshift" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.810598 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.815269 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.815646 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.816769 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.816930 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.817291 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.817431 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.817567 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.817925 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.818009 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.818941 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.819290 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.820001 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.826834 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.838273 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.849427 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.859204 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq"] Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.873808 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.907991 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-utilities\") pod \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.908048 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjt79\" (UniqueName: \"kubernetes.io/projected/e44e0cc5-2800-489c-9bd8-0f06f15adfca-kube-api-access-xjt79\") pod \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.908074 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-catalog-content\") pod \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\" (UID: \"e44e0cc5-2800-489c-9bd8-0f06f15adfca\") " Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909388 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-router-certs\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909480 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-service-ca\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909526 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-login\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909620 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-error\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909690 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909760 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909784 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909811 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-audit-policies\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909850 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njjpx\" (UniqueName: \"kubernetes.io/projected/b97681bc-b8d2-4ddf-805c-962919da3b84-kube-api-access-njjpx\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909896 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b97681bc-b8d2-4ddf-805c-962919da3b84-audit-dir\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.909958 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.910025 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.910063 4906 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-session\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.910100 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.910243 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-utilities" (OuterVolumeSpecName: "utilities") pod "e44e0cc5-2800-489c-9bd8-0f06f15adfca" (UID: "e44e0cc5-2800-489c-9bd8-0f06f15adfca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:36:34 crc kubenswrapper[4906]: I0227 08:36:34.922153 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e44e0cc5-2800-489c-9bd8-0f06f15adfca-kube-api-access-xjt79" (OuterVolumeSpecName: "kube-api-access-xjt79") pod "e44e0cc5-2800-489c-9bd8-0f06f15adfca" (UID: "e44e0cc5-2800-489c-9bd8-0f06f15adfca"). InnerVolumeSpecName "kube-api-access-xjt79". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.011951 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njjpx\" (UniqueName: \"kubernetes.io/projected/b97681bc-b8d2-4ddf-805c-962919da3b84-kube-api-access-njjpx\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012010 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b97681bc-b8d2-4ddf-805c-962919da3b84-audit-dir\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012041 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012073 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012097 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-session\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012119 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012140 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-router-certs\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012165 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-service-ca\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012189 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-login\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012210 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-error\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012230 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012260 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012274 4906 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012292 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-audit-policies\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012336 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.012348 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjt79\" (UniqueName: \"kubernetes.io/projected/e44e0cc5-2800-489c-9bd8-0f06f15adfca-kube-api-access-xjt79\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.013504 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-audit-policies\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.013518 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-cliconfig\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.013595 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b97681bc-b8d2-4ddf-805c-962919da3b84-audit-dir\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.014486 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.015813 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-service-ca\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.015980 4906 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-serving-cert\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.016115 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.019030 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-router-certs\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.021637 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.024298 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-error\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.024387 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-system-session\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.026634 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.028550 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/b97681bc-b8d2-4ddf-805c-962919da3b84-v4-0-config-user-template-login\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.031581 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e44e0cc5-2800-489c-9bd8-0f06f15adfca" (UID: "e44e0cc5-2800-489c-9bd8-0f06f15adfca"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.031667 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njjpx\" (UniqueName: \"kubernetes.io/projected/b97681bc-b8d2-4ddf-805c-962919da3b84-kube-api-access-njjpx\") pod \"oauth-openshift-d4fb4cb6f-z6jkq\" (UID: \"b97681bc-b8d2-4ddf-805c-962919da3b84\") " pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.115424 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e44e0cc5-2800-489c-9bd8-0f06f15adfca-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.121678 4906 generic.go:334] "Generic (PLEG): container finished" podID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerID="016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c" exitCode=0 Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.121732 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zb9z6" event={"ID":"e44e0cc5-2800-489c-9bd8-0f06f15adfca","Type":"ContainerDied","Data":"016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c"} Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.121776 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zb9z6" event={"ID":"e44e0cc5-2800-489c-9bd8-0f06f15adfca","Type":"ContainerDied","Data":"3b41d1e48adc6e116fd4639711d06364678bce6079d671512848c3a9862b7be5"} Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.121802 4906 scope.go:117] "RemoveContainer" containerID="016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.121815 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zb9z6" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.143021 4906 scope.go:117] "RemoveContainer" containerID="04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.148270 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.158713 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zb9z6"] Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.168401 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zb9z6"] Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.173813 4906 scope.go:117] "RemoveContainer" containerID="9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.191867 4906 scope.go:117] "RemoveContainer" containerID="016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c" Feb 27 08:36:35 crc kubenswrapper[4906]: E0227 08:36:35.192487 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c\": container with ID starting with 016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c not found: ID does not exist" containerID="016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.192591 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c"} err="failed to get container status \"016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c\": rpc error: code = NotFound desc = could not find container \"016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c\": container with ID starting with 016ebd00b85fba4d56336cd46607b21d1cf2993aa7a0e9eedef9e9812b33ba0c not found: ID does not exist" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.192699 4906 scope.go:117] "RemoveContainer" containerID="04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84" Feb 27 08:36:35 crc kubenswrapper[4906]: E0227 08:36:35.193111 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84\": container with ID starting with 04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84 not found: ID does not exist" containerID="04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.193192 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84"} err="failed to get container status \"04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84\": rpc error: code = NotFound desc = could not find container \"04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84\": container with ID starting with 04c8ff27e5c84ce301dbc3b9ac86b5635c816102f10c04d05432050cb6b9bc84 not found: ID does not exist" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.193287 4906 scope.go:117] "RemoveContainer" containerID="9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae" Feb 27 08:36:35 crc kubenswrapper[4906]: E0227 08:36:35.193823 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae\": container with ID starting with 
9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae not found: ID does not exist" containerID="9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.193926 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae"} err="failed to get container status \"9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae\": rpc error: code = NotFound desc = could not find container \"9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae\": container with ID starting with 9f333fdb2ded6e82989ca607d8550981b1d2f8d7fe1743bfb7d97edc458672ae not found: ID does not exist" Feb 27 08:36:35 crc kubenswrapper[4906]: I0227 08:36:35.674019 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq"] Feb 27 08:36:36 crc kubenswrapper[4906]: I0227 08:36:36.128248 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" event={"ID":"b97681bc-b8d2-4ddf-805c-962919da3b84","Type":"ContainerStarted","Data":"38a0a6e5be2b7bb39efab6958493ef8d29d7ed00821fce7237a752e27a35062d"} Feb 27 08:36:36 crc kubenswrapper[4906]: I0227 08:36:36.128306 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" event={"ID":"b97681bc-b8d2-4ddf-805c-962919da3b84","Type":"ContainerStarted","Data":"d3b4b5ee55d94e6782d39d4cfc1f4aa9597f64f763aaed0544d260f8e365f66a"} Feb 27 08:36:36 crc kubenswrapper[4906]: I0227 08:36:36.129120 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:36 crc kubenswrapper[4906]: I0227 08:36:36.152645 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" podStartSLOduration=30.152624503 podStartE2EDuration="30.152624503s" podCreationTimestamp="2026-02-27 08:36:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:36.150000355 +0000 UTC m=+494.544401965" watchObservedRunningTime="2026-02-27 08:36:36.152624503 +0000 UTC m=+494.547026113" Feb 27 08:36:36 crc kubenswrapper[4906]: I0227 08:36:36.498713 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-d4fb4cb6f-z6jkq" Feb 27 08:36:36 crc kubenswrapper[4906]: I0227 08:36:36.560720 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" path="/var/lib/kubelet/pods/e44e0cc5-2800-489c-9bd8-0f06f15adfca/volumes" Feb 27 08:36:45 crc kubenswrapper[4906]: I0227 08:36:45.769751 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6986774b4f-4576f"] Feb 27 08:36:45 crc kubenswrapper[4906]: I0227 08:36:45.770859 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" podUID="d91175f3-23aa-43ee-b193-3661c803b68c" containerName="controller-manager" containerID="cri-o://14be28afb066c56bb9ce969940fb09fb46862e1e985a58df3034960d4b7a56aa" gracePeriod=30 Feb 27 08:36:45 crc kubenswrapper[4906]: I0227 08:36:45.866995 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf"] Feb 27 08:36:45 crc kubenswrapper[4906]: I0227 08:36:45.867298 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" podUID="93c5dea3-7d27-439b-8219-4178cae7536b" containerName="route-controller-manager" containerID="cri-o://3662fd3ce3c87e8e15c6be7caffc161aa66ff5e19e49679e8a1d49b690b01972" gracePeriod=30 Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.197975 4906 generic.go:334] "Generic (PLEG): container finished" podID="93c5dea3-7d27-439b-8219-4178cae7536b" containerID="3662fd3ce3c87e8e15c6be7caffc161aa66ff5e19e49679e8a1d49b690b01972" exitCode=0 Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.198540 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" event={"ID":"93c5dea3-7d27-439b-8219-4178cae7536b","Type":"ContainerDied","Data":"3662fd3ce3c87e8e15c6be7caffc161aa66ff5e19e49679e8a1d49b690b01972"} Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.201744 4906 generic.go:334] "Generic (PLEG): container finished" podID="d91175f3-23aa-43ee-b193-3661c803b68c" containerID="14be28afb066c56bb9ce969940fb09fb46862e1e985a58df3034960d4b7a56aa" exitCode=0 Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.201838 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" event={"ID":"d91175f3-23aa-43ee-b193-3661c803b68c","Type":"ContainerDied","Data":"14be28afb066c56bb9ce969940fb09fb46862e1e985a58df3034960d4b7a56aa"} Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.442088 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.510039 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.567756 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-proxy-ca-bundles\") pod \"d91175f3-23aa-43ee-b193-3661c803b68c\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.567828 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d91175f3-23aa-43ee-b193-3661c803b68c-serving-cert\") pod \"d91175f3-23aa-43ee-b193-3661c803b68c\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.567851 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mzkxv\" (UniqueName: \"kubernetes.io/projected/93c5dea3-7d27-439b-8219-4178cae7536b-kube-api-access-mzkxv\") pod \"93c5dea3-7d27-439b-8219-4178cae7536b\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.568039 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bl7kk\" (UniqueName: \"kubernetes.io/projected/d91175f3-23aa-43ee-b193-3661c803b68c-kube-api-access-bl7kk\") pod \"d91175f3-23aa-43ee-b193-3661c803b68c\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.568066 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-client-ca\") pod \"d91175f3-23aa-43ee-b193-3661c803b68c\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.568091 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-client-ca\") pod \"93c5dea3-7d27-439b-8219-4178cae7536b\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.568113 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93c5dea3-7d27-439b-8219-4178cae7536b-serving-cert\") pod \"93c5dea3-7d27-439b-8219-4178cae7536b\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.568141 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-config\") pod \"d91175f3-23aa-43ee-b193-3661c803b68c\" (UID: \"d91175f3-23aa-43ee-b193-3661c803b68c\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.568177 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-config\") pod \"93c5dea3-7d27-439b-8219-4178cae7536b\" (UID: \"93c5dea3-7d27-439b-8219-4178cae7536b\") " Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.568983 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "d91175f3-23aa-43ee-b193-3661c803b68c" 
(UID: "d91175f3-23aa-43ee-b193-3661c803b68c"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.569130 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-config" (OuterVolumeSpecName: "config") pod "93c5dea3-7d27-439b-8219-4178cae7536b" (UID: "93c5dea3-7d27-439b-8219-4178cae7536b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.570040 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-client-ca" (OuterVolumeSpecName: "client-ca") pod "93c5dea3-7d27-439b-8219-4178cae7536b" (UID: "93c5dea3-7d27-439b-8219-4178cae7536b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.570035 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-config" (OuterVolumeSpecName: "config") pod "d91175f3-23aa-43ee-b193-3661c803b68c" (UID: "d91175f3-23aa-43ee-b193-3661c803b68c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.570104 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-client-ca" (OuterVolumeSpecName: "client-ca") pod "d91175f3-23aa-43ee-b193-3661c803b68c" (UID: "d91175f3-23aa-43ee-b193-3661c803b68c"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.574947 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93c5dea3-7d27-439b-8219-4178cae7536b-kube-api-access-mzkxv" (OuterVolumeSpecName: "kube-api-access-mzkxv") pod "93c5dea3-7d27-439b-8219-4178cae7536b" (UID: "93c5dea3-7d27-439b-8219-4178cae7536b"). InnerVolumeSpecName "kube-api-access-mzkxv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.575060 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93c5dea3-7d27-439b-8219-4178cae7536b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "93c5dea3-7d27-439b-8219-4178cae7536b" (UID: "93c5dea3-7d27-439b-8219-4178cae7536b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.575154 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d91175f3-23aa-43ee-b193-3661c803b68c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "d91175f3-23aa-43ee-b193-3661c803b68c" (UID: "d91175f3-23aa-43ee-b193-3661c803b68c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.575797 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d91175f3-23aa-43ee-b193-3661c803b68c-kube-api-access-bl7kk" (OuterVolumeSpecName: "kube-api-access-bl7kk") pod "d91175f3-23aa-43ee-b193-3661c803b68c" (UID: "d91175f3-23aa-43ee-b193-3661c803b68c"). 
InnerVolumeSpecName "kube-api-access-bl7kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669281 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669328 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669340 4906 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669352 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d91175f3-23aa-43ee-b193-3661c803b68c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669364 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mzkxv\" (UniqueName: \"kubernetes.io/projected/93c5dea3-7d27-439b-8219-4178cae7536b-kube-api-access-mzkxv\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669373 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bl7kk\" (UniqueName: \"kubernetes.io/projected/d91175f3-23aa-43ee-b193-3661c803b68c-kube-api-access-bl7kk\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669381 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/d91175f3-23aa-43ee-b193-3661c803b68c-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669389 4906 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/93c5dea3-7d27-439b-8219-4178cae7536b-client-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.669397 4906 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/93c5dea3-7d27-439b-8219-4178cae7536b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.818704 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6db975cd94-76knt"] Feb 27 08:36:46 crc kubenswrapper[4906]: E0227 08:36:46.818982 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93c5dea3-7d27-439b-8219-4178cae7536b" containerName="route-controller-manager" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.818998 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="93c5dea3-7d27-439b-8219-4178cae7536b" containerName="route-controller-manager" Feb 27 08:36:46 crc kubenswrapper[4906]: E0227 08:36:46.819020 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="extract-utilities" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.819029 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="extract-utilities" Feb 27 08:36:46 crc kubenswrapper[4906]: E0227 08:36:46.819042 4906 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="extract-content" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.819051 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="extract-content" Feb 27 08:36:46 crc kubenswrapper[4906]: E0227 08:36:46.819066 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="registry-server" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.819074 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="registry-server" Feb 27 08:36:46 crc kubenswrapper[4906]: E0227 08:36:46.819087 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91175f3-23aa-43ee-b193-3661c803b68c" containerName="controller-manager" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.819095 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91175f3-23aa-43ee-b193-3661c803b68c" containerName="controller-manager" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.819237 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91175f3-23aa-43ee-b193-3661c803b68c" containerName="controller-manager" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.819255 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e44e0cc5-2800-489c-9bd8-0f06f15adfca" containerName="registry-server" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.819278 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="93c5dea3-7d27-439b-8219-4178cae7536b" containerName="route-controller-manager" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.821740 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.836920 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6db975cd94-76knt"] Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.872407 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4facc361-936e-4625-9929-5ac17308f0fd-serving-cert\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.872678 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skclt\" (UniqueName: \"kubernetes.io/projected/4facc361-936e-4625-9929-5ac17308f0fd-kube-api-access-skclt\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.872773 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-client-ca\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.872933 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-proxy-ca-bundles\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.873047 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-config\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.975228 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4facc361-936e-4625-9929-5ac17308f0fd-serving-cert\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.975305 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skclt\" (UniqueName: \"kubernetes.io/projected/4facc361-936e-4625-9929-5ac17308f0fd-kube-api-access-skclt\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.975339 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-client-ca\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.975385 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-proxy-ca-bundles\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.975420 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-config\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.977665 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-proxy-ca-bundles\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.977816 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-config\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.978630 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4facc361-936e-4625-9929-5ac17308f0fd-client-ca\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.981293 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4facc361-936e-4625-9929-5ac17308f0fd-serving-cert\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:46 crc kubenswrapper[4906]: I0227 08:36:46.998522 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skclt\" (UniqueName: \"kubernetes.io/projected/4facc361-936e-4625-9929-5ac17308f0fd-kube-api-access-skclt\") pod \"controller-manager-6db975cd94-76knt\" (UID: \"4facc361-936e-4625-9929-5ac17308f0fd\") " pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.145166 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.213368 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.214189 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf" event={"ID":"93c5dea3-7d27-439b-8219-4178cae7536b","Type":"ContainerDied","Data":"0372a1ed7b222634d034dfa2e35ee068a0754c7d0c52e5878bdd43ad81f3c2fa"} Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.214304 4906 scope.go:117] "RemoveContainer" containerID="3662fd3ce3c87e8e15c6be7caffc161aa66ff5e19e49679e8a1d49b690b01972" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.218310 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" event={"ID":"d91175f3-23aa-43ee-b193-3661c803b68c","Type":"ContainerDied","Data":"4d424462f4d0a1196118af12db33ba2e9b6ae60fb2873c9a689d60ffa58cdbd6"} Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.218554 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6986774b4f-4576f" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.240423 4906 scope.go:117] "RemoveContainer" containerID="14be28afb066c56bb9ce969940fb09fb46862e1e985a58df3034960d4b7a56aa" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.248127 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf"] Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.251966 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5fc8cf4bff-4bkhf"] Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.272941 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6986774b4f-4576f"] Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.277467 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6986774b4f-4576f"] Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.573657 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6db975cd94-76knt"] Feb 27 08:36:47 crc kubenswrapper[4906]: W0227 08:36:47.582494 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4facc361_936e_4625_9929_5ac17308f0fd.slice/crio-b73eb77ac2236be4542fe01654ec56a2a8734e1e5fa91b398009e71ebce1272e WatchSource:0}: Error finding container b73eb77ac2236be4542fe01654ec56a2a8734e1e5fa91b398009e71ebce1272e: Status 404 returned error can't find the container with id b73eb77ac2236be4542fe01654ec56a2a8734e1e5fa91b398009e71ebce1272e Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.820767 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6"] Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.823837 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.828827 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.828993 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.828993 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.829326 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.829524 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.829978 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.833895 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6"] Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.885650 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2k74\" (UniqueName: \"kubernetes.io/projected/6d7e6a74-5827-4032-af68-c06f6b752588-kube-api-access-k2k74\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.885711 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d7e6a74-5827-4032-af68-c06f6b752588-client-ca\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.885730 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d7e6a74-5827-4032-af68-c06f6b752588-serving-cert\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.885785 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d7e6a74-5827-4032-af68-c06f6b752588-config\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.986984 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2k74\" (UniqueName: \"kubernetes.io/projected/6d7e6a74-5827-4032-af68-c06f6b752588-kube-api-access-k2k74\") pod 
\"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.987053 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d7e6a74-5827-4032-af68-c06f6b752588-client-ca\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.987075 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d7e6a74-5827-4032-af68-c06f6b752588-serving-cert\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.987126 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d7e6a74-5827-4032-af68-c06f6b752588-config\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.988266 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6d7e6a74-5827-4032-af68-c06f6b752588-client-ca\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.988500 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6d7e6a74-5827-4032-af68-c06f6b752588-config\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:47 crc kubenswrapper[4906]: I0227 08:36:47.996237 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6d7e6a74-5827-4032-af68-c06f6b752588-serving-cert\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.006063 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2k74\" (UniqueName: \"kubernetes.io/projected/6d7e6a74-5827-4032-af68-c06f6b752588-kube-api-access-k2k74\") pod \"route-controller-manager-c94c75f4d-vjlz6\" (UID: \"6d7e6a74-5827-4032-af68-c06f6b752588\") " pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.145686 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.243543 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" event={"ID":"4facc361-936e-4625-9929-5ac17308f0fd","Type":"ContainerStarted","Data":"28b5c2aa0ca8f1ef01c5f33882521ebb1be3476f01ef68b53247d5ba96f24f1e"} Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.244047 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" event={"ID":"4facc361-936e-4625-9929-5ac17308f0fd","Type":"ContainerStarted","Data":"b73eb77ac2236be4542fe01654ec56a2a8734e1e5fa91b398009e71ebce1272e"} Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.245387 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.252500 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.310182 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6db975cd94-76knt" podStartSLOduration=3.31015874 podStartE2EDuration="3.31015874s" podCreationTimestamp="2026-02-27 08:36:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:48.271343905 +0000 UTC m=+506.665745515" watchObservedRunningTime="2026-02-27 08:36:48.31015874 +0000 UTC m=+506.704560350" Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.383313 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6"] Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.561082 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93c5dea3-7d27-439b-8219-4178cae7536b" path="/var/lib/kubelet/pods/93c5dea3-7d27-439b-8219-4178cae7536b/volumes" Feb 27 08:36:48 crc kubenswrapper[4906]: I0227 08:36:48.562580 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d91175f3-23aa-43ee-b193-3661c803b68c" path="/var/lib/kubelet/pods/d91175f3-23aa-43ee-b193-3661c803b68c/volumes" Feb 27 08:36:49 crc kubenswrapper[4906]: I0227 08:36:49.269294 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" event={"ID":"6d7e6a74-5827-4032-af68-c06f6b752588","Type":"ContainerStarted","Data":"edebc08ceff8096669f5bd0077aa77a593a877c1f880c8ed3ea7d8e2a8d8fe57"} Feb 27 08:36:49 crc kubenswrapper[4906]: I0227 08:36:49.269360 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" event={"ID":"6d7e6a74-5827-4032-af68-c06f6b752588","Type":"ContainerStarted","Data":"349c380a7843f43c120c094dc095b22799df8b841aa05c8c916a859dd4648322"} Feb 27 08:36:49 crc kubenswrapper[4906]: I0227 08:36:49.285802 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" podStartSLOduration=4.285780866 podStartE2EDuration="4.285780866s" podCreationTimestamp="2026-02-27 08:36:45 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:36:49.285139549 +0000 UTC m=+507.679541159" watchObservedRunningTime="2026-02-27 08:36:49.285780866 +0000 UTC m=+507.680182476" Feb 27 08:36:50 crc kubenswrapper[4906]: I0227 08:36:50.275698 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:50 crc kubenswrapper[4906]: I0227 08:36:50.283265 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-c94c75f4d-vjlz6" Feb 27 08:36:54 crc kubenswrapper[4906]: I0227 08:36:54.844178 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:36:54 crc kubenswrapper[4906]: I0227 08:36:54.845247 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:36:54 crc kubenswrapper[4906]: I0227 08:36:54.845315 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:36:54 crc kubenswrapper[4906]: I0227 08:36:54.846079 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"432a2ec448d5afa76b89b67103131d632bfc1e942f1d1803d36030738d876711"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:36:54 crc kubenswrapper[4906]: I0227 08:36:54.846137 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://432a2ec448d5afa76b89b67103131d632bfc1e942f1d1803d36030738d876711" gracePeriod=600 Feb 27 08:36:55 crc kubenswrapper[4906]: I0227 08:36:55.324137 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="432a2ec448d5afa76b89b67103131d632bfc1e942f1d1803d36030738d876711" exitCode=0 Feb 27 08:36:55 crc kubenswrapper[4906]: I0227 08:36:55.324345 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"432a2ec448d5afa76b89b67103131d632bfc1e942f1d1803d36030738d876711"} Feb 27 08:36:55 crc kubenswrapper[4906]: I0227 08:36:55.324638 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"5279e6ebbd53a2bdf9a1dc599b1baf42994b0cc910b5f1d6e07817732e7c99c7"} Feb 27 08:36:55 crc kubenswrapper[4906]: I0227 08:36:55.324663 4906 scope.go:117] "RemoveContainer" 
containerID="4b341854493916537aca1f9a592bd011b0ecde6853ed875d52221c856cf428ca" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.721537 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hw96v"] Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.722894 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-hw96v" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerName="registry-server" containerID="cri-o://32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b" gracePeriod=30 Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.739302 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t5l4n"] Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.739632 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-t5l4n" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="registry-server" containerID="cri-o://e9651d07ea8cf43bc78da600ef1773aef8885b18ec5d27ba1d15c999ec4cc0a0" gracePeriod=30 Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.760127 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pqnpm"] Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.760377 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" podUID="739fb53a-c353-4113-9f21-062b6580a184" containerName="marketplace-operator" containerID="cri-o://e6959c9b0e612b767afbb03e1d795075496bf62768e586a5174ad8286561587e" gracePeriod=30 Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.767730 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-knxlc"] Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.768134 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-knxlc" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="registry-server" containerID="cri-o://3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6" gracePeriod=30 Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.772836 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4cb5p"] Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.773771 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.799484 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rtknh"] Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.799900 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rtknh" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="registry-server" containerID="cri-o://34f07bc951721eef2a395b938d1e1c55a3ea4abeeba47a2cfdc1768716dae7ac" gracePeriod=30 Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.805627 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4cb5p"] Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.837266 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/93a85195-01f3-43e1-9a7e-7603a41b47a4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.837313 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkhmj\" (UniqueName: \"kubernetes.io/projected/93a85195-01f3-43e1-9a7e-7603a41b47a4-kube-api-access-mkhmj\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.837353 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/93a85195-01f3-43e1-9a7e-7603a41b47a4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.938628 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/93a85195-01f3-43e1-9a7e-7603a41b47a4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.938753 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/93a85195-01f3-43e1-9a7e-7603a41b47a4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.938789 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkhmj\" (UniqueName: \"kubernetes.io/projected/93a85195-01f3-43e1-9a7e-7603a41b47a4-kube-api-access-mkhmj\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.943682 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/93a85195-01f3-43e1-9a7e-7603a41b47a4-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.949708 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/93a85195-01f3-43e1-9a7e-7603a41b47a4-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:11 crc kubenswrapper[4906]: I0227 08:37:11.960033 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkhmj\" (UniqueName: \"kubernetes.io/projected/93a85195-01f3-43e1-9a7e-7603a41b47a4-kube-api-access-mkhmj\") pod \"marketplace-operator-79b997595-4cb5p\" (UID: \"93a85195-01f3-43e1-9a7e-7603a41b47a4\") " pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.220168 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:12 crc kubenswrapper[4906]: E0227 08:37:12.278949 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6 is running failed: container process not found" containerID="3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6" cmd=["grpc_health_probe","-addr=:50051"] Feb 27 08:37:12 crc kubenswrapper[4906]: E0227 08:37:12.279667 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6 is running failed: container process not found" containerID="3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6" cmd=["grpc_health_probe","-addr=:50051"] Feb 27 08:37:12 crc kubenswrapper[4906]: E0227 08:37:12.280084 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6 is running failed: container process not found" containerID="3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6" cmd=["grpc_health_probe","-addr=:50051"] Feb 27 08:37:12 crc kubenswrapper[4906]: E0227 08:37:12.280137 4906 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-knxlc" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="registry-server" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.373610 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.437494 4906 generic.go:334] "Generic (PLEG): container finished" podID="86ce64fc-356d-4172-b0c0-8074921dc727" containerID="e9651d07ea8cf43bc78da600ef1773aef8885b18ec5d27ba1d15c999ec4cc0a0" exitCode=0 Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.437594 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t5l4n" event={"ID":"86ce64fc-356d-4172-b0c0-8074921dc727","Type":"ContainerDied","Data":"e9651d07ea8cf43bc78da600ef1773aef8885b18ec5d27ba1d15c999ec4cc0a0"} Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.442519 4906 generic.go:334] "Generic (PLEG): container finished" podID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerID="34f07bc951721eef2a395b938d1e1c55a3ea4abeeba47a2cfdc1768716dae7ac" exitCode=0 Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.442604 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerDied","Data":"34f07bc951721eef2a395b938d1e1c55a3ea4abeeba47a2cfdc1768716dae7ac"} Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.448157 4906 generic.go:334] "Generic (PLEG): container finished" podID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerID="3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6" exitCode=0 Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.448247 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-knxlc" event={"ID":"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7","Type":"ContainerDied","Data":"3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6"} Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.448853 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-utilities\") pod \"f9c97127-8fdd-40b2-8248-40df8c50e302\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.450145 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-utilities" (OuterVolumeSpecName: "utilities") pod "f9c97127-8fdd-40b2-8248-40df8c50e302" (UID: "f9c97127-8fdd-40b2-8248-40df8c50e302"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.450615 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-catalog-content\") pod \"f9c97127-8fdd-40b2-8248-40df8c50e302\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.450804 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4w5lx\" (UniqueName: \"kubernetes.io/projected/f9c97127-8fdd-40b2-8248-40df8c50e302-kube-api-access-4w5lx\") pod \"f9c97127-8fdd-40b2-8248-40df8c50e302\" (UID: \"f9c97127-8fdd-40b2-8248-40df8c50e302\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.450791 4906 generic.go:334] "Generic (PLEG): container finished" podID="739fb53a-c353-4113-9f21-062b6580a184" containerID="e6959c9b0e612b767afbb03e1d795075496bf62768e586a5174ad8286561587e" exitCode=0 Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.450835 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" event={"ID":"739fb53a-c353-4113-9f21-062b6580a184","Type":"ContainerDied","Data":"e6959c9b0e612b767afbb03e1d795075496bf62768e586a5174ad8286561587e"} Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.452029 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.454596 4906 generic.go:334] "Generic (PLEG): container finished" podID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerID="32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b" exitCode=0 Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.454646 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hw96v" event={"ID":"f9c97127-8fdd-40b2-8248-40df8c50e302","Type":"ContainerDied","Data":"32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b"} Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.454736 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-hw96v" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.455026 4906 scope.go:117] "RemoveContainer" containerID="32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.455012 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-hw96v" event={"ID":"f9c97127-8fdd-40b2-8248-40df8c50e302","Type":"ContainerDied","Data":"a43e5ba780a0193025a5b3f83638aecae99bd0bb5cbb7f70e7fb802f7a745af9"} Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.457260 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9c97127-8fdd-40b2-8248-40df8c50e302-kube-api-access-4w5lx" (OuterVolumeSpecName: "kube-api-access-4w5lx") pod "f9c97127-8fdd-40b2-8248-40df8c50e302" (UID: "f9c97127-8fdd-40b2-8248-40df8c50e302"). InnerVolumeSpecName "kube-api-access-4w5lx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.491616 4906 scope.go:117] "RemoveContainer" containerID="a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.510222 4906 scope.go:117] "RemoveContainer" containerID="a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.530099 4906 scope.go:117] "RemoveContainer" containerID="32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b" Feb 27 08:37:12 crc kubenswrapper[4906]: E0227 08:37:12.530800 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b\": container with ID starting with 32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b not found: ID does not exist" containerID="32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.530844 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b"} err="failed to get container status \"32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b\": rpc error: code = NotFound desc = could not find container \"32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b\": container with ID starting with 32adfec94cacc4e4e7e2ae8ac3f06773f6cbe12050a6143319ed6757ce419d7b not found: ID does not exist" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.530872 4906 scope.go:117] "RemoveContainer" containerID="a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5" Feb 27 08:37:12 crc kubenswrapper[4906]: E0227 08:37:12.531302 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5\": container with ID starting with a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5 not found: ID does not exist" containerID="a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.531328 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5"} err="failed to get container status \"a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5\": rpc error: code = NotFound desc = could not find container \"a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5\": container with ID starting with a29fd5454bd1f4c2cc63cd1848bc242ade8481f08d382f1ac1ceac8e03668cd5 not found: ID does not exist" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.531343 4906 scope.go:117] "RemoveContainer" containerID="a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489" Feb 27 08:37:12 crc kubenswrapper[4906]: E0227 08:37:12.531640 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489\": container with ID starting with a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489 not found: ID does not exist" containerID="a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489" Feb 27 08:37:12 crc 
kubenswrapper[4906]: I0227 08:37:12.531668 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489"} err="failed to get container status \"a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489\": rpc error: code = NotFound desc = could not find container \"a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489\": container with ID starting with a49cda8c7e70fe90b08a40b0ab4d23bf0ee8494b17f8ce65f97a5f153975a489 not found: ID does not exist" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.585715 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4w5lx\" (UniqueName: \"kubernetes.io/projected/f9c97127-8fdd-40b2-8248-40df8c50e302-kube-api-access-4w5lx\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.620206 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f9c97127-8fdd-40b2-8248-40df8c50e302" (UID: "f9c97127-8fdd-40b2-8248-40df8c50e302"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.668840 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.673174 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.684061 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.686498 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f9c97127-8fdd-40b2-8248-40df8c50e302-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.700285 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.788010 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-catalog-content\") pod \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790661 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-utilities\") pod \"5deb1490-b634-484c-a7ea-56f3ee6cad31\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790710 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/739fb53a-c353-4113-9f21-062b6580a184-marketplace-operator-metrics\") pod \"739fb53a-c353-4113-9f21-062b6580a184\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790737 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2hwfk\" (UniqueName: \"kubernetes.io/projected/5deb1490-b634-484c-a7ea-56f3ee6cad31-kube-api-access-2hwfk\") pod \"5deb1490-b634-484c-a7ea-56f3ee6cad31\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790760 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-catalog-content\") pod \"86ce64fc-356d-4172-b0c0-8074921dc727\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790808 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-utilities\") pod \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790826 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-catalog-content\") pod \"5deb1490-b634-484c-a7ea-56f3ee6cad31\" (UID: \"5deb1490-b634-484c-a7ea-56f3ee6cad31\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790867 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tpxwg\" (UniqueName: \"kubernetes.io/projected/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-kube-api-access-tpxwg\") pod \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\" (UID: \"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790910 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n7kz\" (UniqueName: \"kubernetes.io/projected/86ce64fc-356d-4172-b0c0-8074921dc727-kube-api-access-8n7kz\") pod \"86ce64fc-356d-4172-b0c0-8074921dc727\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790938 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-utilities\") pod \"86ce64fc-356d-4172-b0c0-8074921dc727\" (UID: \"86ce64fc-356d-4172-b0c0-8074921dc727\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790965 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/739fb53a-c353-4113-9f21-062b6580a184-marketplace-trusted-ca\") pod \"739fb53a-c353-4113-9f21-062b6580a184\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.790986 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r9z98\" (UniqueName: \"kubernetes.io/projected/739fb53a-c353-4113-9f21-062b6580a184-kube-api-access-r9z98\") pod \"739fb53a-c353-4113-9f21-062b6580a184\" (UID: \"739fb53a-c353-4113-9f21-062b6580a184\") " Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.791980 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-utilities" (OuterVolumeSpecName: "utilities") pod "fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" (UID: "fe1433ef-5f77-4598-bd41-4cd2da2a8bd7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.793811 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-utilities" (OuterVolumeSpecName: "utilities") pod "5deb1490-b634-484c-a7ea-56f3ee6cad31" (UID: "5deb1490-b634-484c-a7ea-56f3ee6cad31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.798116 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-utilities" (OuterVolumeSpecName: "utilities") pod "86ce64fc-356d-4172-b0c0-8074921dc727" (UID: "86ce64fc-356d-4172-b0c0-8074921dc727"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.798172 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/739fb53a-c353-4113-9f21-062b6580a184-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "739fb53a-c353-4113-9f21-062b6580a184" (UID: "739fb53a-c353-4113-9f21-062b6580a184"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.798610 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86ce64fc-356d-4172-b0c0-8074921dc727-kube-api-access-8n7kz" (OuterVolumeSpecName: "kube-api-access-8n7kz") pod "86ce64fc-356d-4172-b0c0-8074921dc727" (UID: "86ce64fc-356d-4172-b0c0-8074921dc727"). InnerVolumeSpecName "kube-api-access-8n7kz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.798827 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-hw96v"] Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.802163 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-hw96v"] Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.805434 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/739fb53a-c353-4113-9f21-062b6580a184-kube-api-access-r9z98" (OuterVolumeSpecName: "kube-api-access-r9z98") pod "739fb53a-c353-4113-9f21-062b6580a184" (UID: "739fb53a-c353-4113-9f21-062b6580a184"). InnerVolumeSpecName "kube-api-access-r9z98". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.805546 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/739fb53a-c353-4113-9f21-062b6580a184-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "739fb53a-c353-4113-9f21-062b6580a184" (UID: "739fb53a-c353-4113-9f21-062b6580a184"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.805943 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-kube-api-access-tpxwg" (OuterVolumeSpecName: "kube-api-access-tpxwg") pod "fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" (UID: "fe1433ef-5f77-4598-bd41-4cd2da2a8bd7"). InnerVolumeSpecName "kube-api-access-tpxwg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.807357 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5deb1490-b634-484c-a7ea-56f3ee6cad31-kube-api-access-2hwfk" (OuterVolumeSpecName: "kube-api-access-2hwfk") pod "5deb1490-b634-484c-a7ea-56f3ee6cad31" (UID: "5deb1490-b634-484c-a7ea-56f3ee6cad31"). InnerVolumeSpecName "kube-api-access-2hwfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.825613 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" (UID: "fe1433ef-5f77-4598-bd41-4cd2da2a8bd7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.857184 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86ce64fc-356d-4172-b0c0-8074921dc727" (UID: "86ce64fc-356d-4172-b0c0-8074921dc727"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892591 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892640 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tpxwg\" (UniqueName: \"kubernetes.io/projected/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-kube-api-access-tpxwg\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892655 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n7kz\" (UniqueName: \"kubernetes.io/projected/86ce64fc-356d-4172-b0c0-8074921dc727-kube-api-access-8n7kz\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892665 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892676 4906 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/739fb53a-c353-4113-9f21-062b6580a184-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892686 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r9z98\" (UniqueName: \"kubernetes.io/projected/739fb53a-c353-4113-9f21-062b6580a184-kube-api-access-r9z98\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892695 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892704 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892713 4906 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/739fb53a-c353-4113-9f21-062b6580a184-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892722 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2hwfk\" (UniqueName: \"kubernetes.io/projected/5deb1490-b634-484c-a7ea-56f3ee6cad31-kube-api-access-2hwfk\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.892729 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86ce64fc-356d-4172-b0c0-8074921dc727-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.958286 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5deb1490-b634-484c-a7ea-56f3ee6cad31" (UID: "5deb1490-b634-484c-a7ea-56f3ee6cad31"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.971441 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-4cb5p"] Feb 27 08:37:12 crc kubenswrapper[4906]: I0227 08:37:12.994358 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5deb1490-b634-484c-a7ea-56f3ee6cad31-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.464928 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-t5l4n" event={"ID":"86ce64fc-356d-4172-b0c0-8074921dc727","Type":"ContainerDied","Data":"871eddc81a1199d87918b2bdf0f97755ff80fa04db1d8d6e51ad8f6d235cccd1"} Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.464979 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-t5l4n" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.465011 4906 scope.go:117] "RemoveContainer" containerID="e9651d07ea8cf43bc78da600ef1773aef8885b18ec5d27ba1d15c999ec4cc0a0" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.469118 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rtknh" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.469112 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rtknh" event={"ID":"5deb1490-b634-484c-a7ea-56f3ee6cad31","Type":"ContainerDied","Data":"d1c4a18f0eeb6e23969040685f2be6701dd1cb78268c06aff5db1c9fbd2bad00"} Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.472374 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-knxlc" event={"ID":"fe1433ef-5f77-4598-bd41-4cd2da2a8bd7","Type":"ContainerDied","Data":"be4cc70e989bfca66a059409ebd33b2db77fbd6bb5dab36d43e64832f9f53f31"} Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.472410 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-knxlc" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.478026 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" event={"ID":"739fb53a-c353-4113-9f21-062b6580a184","Type":"ContainerDied","Data":"957c2df71b8400755f9a57762177d510ed93466d7a4e76a3f0c1dd534e967206"} Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.478077 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-pqnpm" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.482874 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" event={"ID":"93a85195-01f3-43e1-9a7e-7603a41b47a4","Type":"ContainerStarted","Data":"aeff4a8d9fa801977e35c6d0aa905a6cc3106dcd2c9bed4bffed56f2b39c8edc"} Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.483122 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" event={"ID":"93a85195-01f3-43e1-9a7e-7603a41b47a4","Type":"ContainerStarted","Data":"f22b61ddc82416cae1038e2db0a7073e60979c1cfe7945f0d5b7e55ff16ee9cb"} Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.483370 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.484303 4906 scope.go:117] "RemoveContainer" containerID="d360e8203fe660a8293d8efee94c52488e144246e022a7ce807985e6decf868e" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.485020 4906 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-4cb5p container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.80:8080/healthz\": dial tcp 10.217.0.80:8080: connect: connection refused" start-of-body= Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.485084 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" podUID="93a85195-01f3-43e1-9a7e-7603a41b47a4" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.80:8080/healthz\": dial tcp 10.217.0.80:8080: connect: connection refused" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.511085 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-t5l4n"] Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.515244 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-t5l4n"] Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.530938 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" podStartSLOduration=2.530913457 podStartE2EDuration="2.530913457s" podCreationTimestamp="2026-02-27 08:37:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:37:13.527677172 +0000 UTC m=+531.922078782" watchObservedRunningTime="2026-02-27 08:37:13.530913457 +0000 UTC m=+531.925315067" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.534823 4906 scope.go:117] "RemoveContainer" containerID="c87b08fe2aeea170a41468a9766d1d6df7eaa5554e894312987a7211fa2d68e6" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.550041 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rtknh"] Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.554353 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rtknh"] Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.569408 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pqnpm"] Feb 27 08:37:13 crc 
kubenswrapper[4906]: I0227 08:37:13.582255 4906 scope.go:117] "RemoveContainer" containerID="34f07bc951721eef2a395b938d1e1c55a3ea4abeeba47a2cfdc1768716dae7ac" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.586777 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-pqnpm"] Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.590788 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-knxlc"] Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.593634 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-knxlc"] Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.602983 4906 scope.go:117] "RemoveContainer" containerID="e9a566d1539ad42da990b4e2135ce2702c6c25f51d62ff1f6ef27dccfeea652a" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.622418 4906 scope.go:117] "RemoveContainer" containerID="1c73eb8e76d2340cba03dbfd1309382ebc7cedb5d18e9b987d0a9c974c6cc599" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.643129 4906 scope.go:117] "RemoveContainer" containerID="3b9e45621525121999c4e0d5415ce33e04f73cdf1d3f5aef94e2c68c27385ad6" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.663271 4906 scope.go:117] "RemoveContainer" containerID="688fea19ece1831380fad2b934d3f5beebcee622ec73837de3cd4b7ea7b1cecb" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.677331 4906 scope.go:117] "RemoveContainer" containerID="7042a6bc1e79ce29f0caa106466997b1b5d388b72e2b4561b207254ddac68a3b" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.691322 4906 scope.go:117] "RemoveContainer" containerID="e6959c9b0e612b767afbb03e1d795075496bf62768e586a5174ad8286561587e" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944171 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-6bqtn"] Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944481 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944504 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944523 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944536 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944561 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944575 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944595 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="739fb53a-c353-4113-9f21-062b6580a184" containerName="marketplace-operator" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944609 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="739fb53a-c353-4113-9f21-062b6580a184" containerName="marketplace-operator" Feb 27 08:37:13 crc 
kubenswrapper[4906]: E0227 08:37:13.944627 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944638 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944659 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944672 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944690 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944702 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944725 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944738 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944753 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944768 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944785 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944798 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944817 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944829 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944847 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944859 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerName="extract-content" Feb 27 08:37:13 crc kubenswrapper[4906]: E0227 08:37:13.944875 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.944961 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" 
containerName="extract-utilities" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.945153 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.945177 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="739fb53a-c353-4113-9f21-062b6580a184" containerName="marketplace-operator" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.945195 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.945211 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.945239 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" containerName="registry-server" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.946464 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.950173 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 27 08:37:13 crc kubenswrapper[4906]: I0227 08:37:13.967141 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6bqtn"] Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.107738 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-catalog-content\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.107822 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-utilities\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.107849 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpvgf\" (UniqueName: \"kubernetes.io/projected/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-kube-api-access-kpvgf\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.162478 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-w5gxv"] Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.163941 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.166514 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5gxv"] Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.166720 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.209079 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-catalog-content\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.209199 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-utilities\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.209912 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-utilities\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.209929 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-catalog-content\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.209236 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpvgf\" (UniqueName: \"kubernetes.io/projected/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-kube-api-access-kpvgf\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.246179 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpvgf\" (UniqueName: \"kubernetes.io/projected/3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3-kube-api-access-kpvgf\") pod \"certified-operators-6bqtn\" (UID: \"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3\") " pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.296341 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.312392 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eebf875-9038-4025-bc93-6d759229f64c-utilities\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.312464 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eebf875-9038-4025-bc93-6d759229f64c-catalog-content\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.312495 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wszzx\" (UniqueName: \"kubernetes.io/projected/0eebf875-9038-4025-bc93-6d759229f64c-kube-api-access-wszzx\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.414032 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eebf875-9038-4025-bc93-6d759229f64c-catalog-content\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.414099 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wszzx\" (UniqueName: \"kubernetes.io/projected/0eebf875-9038-4025-bc93-6d759229f64c-kube-api-access-wszzx\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.414159 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eebf875-9038-4025-bc93-6d759229f64c-utilities\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.415228 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0eebf875-9038-4025-bc93-6d759229f64c-catalog-content\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.415302 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0eebf875-9038-4025-bc93-6d759229f64c-utilities\") pod \"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.449281 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wszzx\" (UniqueName: \"kubernetes.io/projected/0eebf875-9038-4025-bc93-6d759229f64c-kube-api-access-wszzx\") pod 
\"redhat-marketplace-w5gxv\" (UID: \"0eebf875-9038-4025-bc93-6d759229f64c\") " pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.480764 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.505012 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-4cb5p" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.571019 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5deb1490-b634-484c-a7ea-56f3ee6cad31" path="/var/lib/kubelet/pods/5deb1490-b634-484c-a7ea-56f3ee6cad31/volumes" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.572141 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="739fb53a-c353-4113-9f21-062b6580a184" path="/var/lib/kubelet/pods/739fb53a-c353-4113-9f21-062b6580a184/volumes" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.572762 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86ce64fc-356d-4172-b0c0-8074921dc727" path="/var/lib/kubelet/pods/86ce64fc-356d-4172-b0c0-8074921dc727/volumes" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.574476 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9c97127-8fdd-40b2-8248-40df8c50e302" path="/var/lib/kubelet/pods/f9c97127-8fdd-40b2-8248-40df8c50e302/volumes" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.575283 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe1433ef-5f77-4598-bd41-4cd2da2a8bd7" path="/var/lib/kubelet/pods/fe1433ef-5f77-4598-bd41-4cd2da2a8bd7/volumes" Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.741315 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-6bqtn"] Feb 27 08:37:14 crc kubenswrapper[4906]: I0227 08:37:14.882983 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-w5gxv"] Feb 27 08:37:15 crc kubenswrapper[4906]: I0227 08:37:15.502299 4906 generic.go:334] "Generic (PLEG): container finished" podID="0eebf875-9038-4025-bc93-6d759229f64c" containerID="007e3e3029ea8e8586e74d88f66d84581862c15651ac686a21e1c1fdef9b4399" exitCode=0 Feb 27 08:37:15 crc kubenswrapper[4906]: I0227 08:37:15.502345 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5gxv" event={"ID":"0eebf875-9038-4025-bc93-6d759229f64c","Type":"ContainerDied","Data":"007e3e3029ea8e8586e74d88f66d84581862c15651ac686a21e1c1fdef9b4399"} Feb 27 08:37:15 crc kubenswrapper[4906]: I0227 08:37:15.502871 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5gxv" event={"ID":"0eebf875-9038-4025-bc93-6d759229f64c","Type":"ContainerStarted","Data":"4992260be659f72e09db40d007a8934e6dc5634c67516de89fcdffef6b7db232"} Feb 27 08:37:15 crc kubenswrapper[4906]: I0227 08:37:15.505821 4906 generic.go:334] "Generic (PLEG): container finished" podID="3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3" containerID="51ca7650f6787cf50d88ea71655cb2fa8bf222bb20b3ad5a8d1816ed87418133" exitCode=0 Feb 27 08:37:15 crc kubenswrapper[4906]: I0227 08:37:15.507918 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bqtn" 
event={"ID":"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3","Type":"ContainerDied","Data":"51ca7650f6787cf50d88ea71655cb2fa8bf222bb20b3ad5a8d1816ed87418133"} Feb 27 08:37:15 crc kubenswrapper[4906]: I0227 08:37:15.507991 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bqtn" event={"ID":"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3","Type":"ContainerStarted","Data":"e24546cba094d522ecc9c7b8efc8630cb8af44287f8b26f3a51beca717e1a1fe"} Feb 27 08:37:15 crc kubenswrapper[4906]: I0227 08:37:15.508779 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.343515 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nzkhk"] Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.344962 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.349276 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nzkhk"] Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.373550 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.441004 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104e053c-89a4-4b45-b02f-26a3d6b0191c-catalog-content\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.441166 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s8c7\" (UniqueName: \"kubernetes.io/projected/104e053c-89a4-4b45-b02f-26a3d6b0191c-kube-api-access-9s8c7\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.441301 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104e053c-89a4-4b45-b02f-26a3d6b0191c-utilities\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.513909 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5gxv" event={"ID":"0eebf875-9038-4025-bc93-6d759229f64c","Type":"ContainerStarted","Data":"b9167925de329113b4a39cf2eb9daf46db3bf3d26b50be5e9396ac288ce17372"} Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.538689 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jsxjh"] Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.541695 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.542944 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104e053c-89a4-4b45-b02f-26a3d6b0191c-catalog-content\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.543020 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s8c7\" (UniqueName: \"kubernetes.io/projected/104e053c-89a4-4b45-b02f-26a3d6b0191c-kube-api-access-9s8c7\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.543060 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104e053c-89a4-4b45-b02f-26a3d6b0191c-utilities\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.543702 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/104e053c-89a4-4b45-b02f-26a3d6b0191c-utilities\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.544057 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/104e053c-89a4-4b45-b02f-26a3d6b0191c-catalog-content\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.545488 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.560182 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jsxjh"] Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.585115 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s8c7\" (UniqueName: \"kubernetes.io/projected/104e053c-89a4-4b45-b02f-26a3d6b0191c-kube-api-access-9s8c7\") pod \"community-operators-nzkhk\" (UID: \"104e053c-89a4-4b45-b02f-26a3d6b0191c\") " pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.643836 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-catalog-content\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.643943 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-utilities\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " 
pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.643977 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvqft\" (UniqueName: \"kubernetes.io/projected/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-kube-api-access-wvqft\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.721295 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.745791 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-catalog-content\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.745949 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-utilities\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.746033 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvqft\" (UniqueName: \"kubernetes.io/projected/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-kube-api-access-wvqft\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.746560 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-utilities\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.746694 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-catalog-content\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.770793 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvqft\" (UniqueName: \"kubernetes.io/projected/3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb-kube-api-access-wvqft\") pod \"redhat-operators-jsxjh\" (UID: \"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb\") " pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:16 crc kubenswrapper[4906]: I0227 08:37:16.938981 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.136659 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nzkhk"] Feb 27 08:37:17 crc kubenswrapper[4906]: W0227 08:37:17.155727 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod104e053c_89a4_4b45_b02f_26a3d6b0191c.slice/crio-009b99edd6555f3dc76cac8fe03fddf7d131e2648dba9d5e50c3475df3ff2be6 WatchSource:0}: Error finding container 009b99edd6555f3dc76cac8fe03fddf7d131e2648dba9d5e50c3475df3ff2be6: Status 404 returned error can't find the container with id 009b99edd6555f3dc76cac8fe03fddf7d131e2648dba9d5e50c3475df3ff2be6 Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.344228 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jsxjh"] Feb 27 08:37:17 crc kubenswrapper[4906]: W0227 08:37:17.410088 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3a626b5a_ab13_4bb5_9f57_9deb8bfc27bb.slice/crio-656c0cd19e581853c92954f2db960ef479408a156350727214b37878c70df25f WatchSource:0}: Error finding container 656c0cd19e581853c92954f2db960ef479408a156350727214b37878c70df25f: Status 404 returned error can't find the container with id 656c0cd19e581853c92954f2db960ef479408a156350727214b37878c70df25f Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.522087 4906 generic.go:334] "Generic (PLEG): container finished" podID="3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3" containerID="6f7f412605f75c348ff3801a602f0d3e081b0dde838e9c200cfff5ecd263f40f" exitCode=0 Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.522200 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bqtn" event={"ID":"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3","Type":"ContainerDied","Data":"6f7f412605f75c348ff3801a602f0d3e081b0dde838e9c200cfff5ecd263f40f"} Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.524851 4906 generic.go:334] "Generic (PLEG): container finished" podID="0eebf875-9038-4025-bc93-6d759229f64c" containerID="b9167925de329113b4a39cf2eb9daf46db3bf3d26b50be5e9396ac288ce17372" exitCode=0 Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.524909 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5gxv" event={"ID":"0eebf875-9038-4025-bc93-6d759229f64c","Type":"ContainerDied","Data":"b9167925de329113b4a39cf2eb9daf46db3bf3d26b50be5e9396ac288ce17372"} Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.528414 4906 generic.go:334] "Generic (PLEG): container finished" podID="104e053c-89a4-4b45-b02f-26a3d6b0191c" containerID="04256c90e94479690b295c5586d514c4cc9769f006b500830a72529ad91099cf" exitCode=0 Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.528748 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzkhk" event={"ID":"104e053c-89a4-4b45-b02f-26a3d6b0191c","Type":"ContainerDied","Data":"04256c90e94479690b295c5586d514c4cc9769f006b500830a72529ad91099cf"} Feb 27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.528910 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzkhk" event={"ID":"104e053c-89a4-4b45-b02f-26a3d6b0191c","Type":"ContainerStarted","Data":"009b99edd6555f3dc76cac8fe03fddf7d131e2648dba9d5e50c3475df3ff2be6"} Feb 
27 08:37:17 crc kubenswrapper[4906]: I0227 08:37:17.531595 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jsxjh" event={"ID":"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb","Type":"ContainerStarted","Data":"656c0cd19e581853c92954f2db960ef479408a156350727214b37878c70df25f"} Feb 27 08:37:18 crc kubenswrapper[4906]: I0227 08:37:18.541802 4906 generic.go:334] "Generic (PLEG): container finished" podID="3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb" containerID="d2c99b5ea7910ebbea42bf19b4bec9b950b2c90ddf6a81d18ae4ace36f023e5b" exitCode=0 Feb 27 08:37:18 crc kubenswrapper[4906]: I0227 08:37:18.541934 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jsxjh" event={"ID":"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb","Type":"ContainerDied","Data":"d2c99b5ea7910ebbea42bf19b4bec9b950b2c90ddf6a81d18ae4ace36f023e5b"} Feb 27 08:37:19 crc kubenswrapper[4906]: I0227 08:37:19.552854 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-w5gxv" event={"ID":"0eebf875-9038-4025-bc93-6d759229f64c","Type":"ContainerStarted","Data":"81f6c5d56d56889a4e6b9bc52a9b2d0acbc2c03dba73d0492b51d7657ba811e1"} Feb 27 08:37:19 crc kubenswrapper[4906]: I0227 08:37:19.556974 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzkhk" event={"ID":"104e053c-89a4-4b45-b02f-26a3d6b0191c","Type":"ContainerStarted","Data":"861957471ec77febfbad323cca32f107da6f31c88a792c64d6bac76afb8c1631"} Feb 27 08:37:19 crc kubenswrapper[4906]: I0227 08:37:19.559634 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-6bqtn" event={"ID":"3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3","Type":"ContainerStarted","Data":"803c414a6a2584bbe33046c93352127c39fe77732716c45d8fdfdee7f5958729"} Feb 27 08:37:19 crc kubenswrapper[4906]: I0227 08:37:19.574044 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-w5gxv" podStartSLOduration=2.710780947 podStartE2EDuration="5.574016541s" podCreationTimestamp="2026-02-27 08:37:14 +0000 UTC" firstStartedPulling="2026-02-27 08:37:15.508346523 +0000 UTC m=+533.902748153" lastFinishedPulling="2026-02-27 08:37:18.371582097 +0000 UTC m=+536.765983747" observedRunningTime="2026-02-27 08:37:19.573048636 +0000 UTC m=+537.967450246" watchObservedRunningTime="2026-02-27 08:37:19.574016541 +0000 UTC m=+537.968418171" Feb 27 08:37:19 crc kubenswrapper[4906]: I0227 08:37:19.627460 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-6bqtn" podStartSLOduration=3.93538485 podStartE2EDuration="6.627431548s" podCreationTimestamp="2026-02-27 08:37:13 +0000 UTC" firstStartedPulling="2026-02-27 08:37:15.508579509 +0000 UTC m=+533.902981109" lastFinishedPulling="2026-02-27 08:37:18.200626187 +0000 UTC m=+536.595027807" observedRunningTime="2026-02-27 08:37:19.620807874 +0000 UTC m=+538.015209474" watchObservedRunningTime="2026-02-27 08:37:19.627431548 +0000 UTC m=+538.021833158" Feb 27 08:37:20 crc kubenswrapper[4906]: I0227 08:37:20.569121 4906 generic.go:334] "Generic (PLEG): container finished" podID="104e053c-89a4-4b45-b02f-26a3d6b0191c" containerID="861957471ec77febfbad323cca32f107da6f31c88a792c64d6bac76afb8c1631" exitCode=0 Feb 27 08:37:20 crc kubenswrapper[4906]: I0227 08:37:20.585281 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzkhk" 
event={"ID":"104e053c-89a4-4b45-b02f-26a3d6b0191c","Type":"ContainerDied","Data":"861957471ec77febfbad323cca32f107da6f31c88a792c64d6bac76afb8c1631"} Feb 27 08:37:21 crc kubenswrapper[4906]: I0227 08:37:21.579022 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jsxjh" event={"ID":"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb","Type":"ContainerStarted","Data":"8468e7374dcd57b0bdb762552e4e79c5d0a32109d1d74837a7173f0082d7f7e2"} Feb 27 08:37:21 crc kubenswrapper[4906]: I0227 08:37:21.583542 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nzkhk" event={"ID":"104e053c-89a4-4b45-b02f-26a3d6b0191c","Type":"ContainerStarted","Data":"719e133e2c50a1e0ec56380bb7a9f6d70ad469116a43d8979c7bf4cee9c5c270"} Feb 27 08:37:21 crc kubenswrapper[4906]: I0227 08:37:21.626310 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nzkhk" podStartSLOduration=2.046147549 podStartE2EDuration="5.626285274s" podCreationTimestamp="2026-02-27 08:37:16 +0000 UTC" firstStartedPulling="2026-02-27 08:37:17.531599297 +0000 UTC m=+535.926000907" lastFinishedPulling="2026-02-27 08:37:21.111737022 +0000 UTC m=+539.506138632" observedRunningTime="2026-02-27 08:37:21.623637845 +0000 UTC m=+540.018039455" watchObservedRunningTime="2026-02-27 08:37:21.626285274 +0000 UTC m=+540.020686884" Feb 27 08:37:22 crc kubenswrapper[4906]: I0227 08:37:22.596257 4906 generic.go:334] "Generic (PLEG): container finished" podID="3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb" containerID="8468e7374dcd57b0bdb762552e4e79c5d0a32109d1d74837a7173f0082d7f7e2" exitCode=0 Feb 27 08:37:22 crc kubenswrapper[4906]: I0227 08:37:22.596381 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jsxjh" event={"ID":"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb","Type":"ContainerDied","Data":"8468e7374dcd57b0bdb762552e4e79c5d0a32109d1d74837a7173f0082d7f7e2"} Feb 27 08:37:23 crc kubenswrapper[4906]: I0227 08:37:23.606274 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jsxjh" event={"ID":"3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb","Type":"ContainerStarted","Data":"de2be44bc82674a6c45a68ec4c99d890a23f79ec50908b82dde5550ed48988b7"} Feb 27 08:37:23 crc kubenswrapper[4906]: I0227 08:37:23.628196 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jsxjh" podStartSLOduration=2.97874609 podStartE2EDuration="7.628168549s" podCreationTimestamp="2026-02-27 08:37:16 +0000 UTC" firstStartedPulling="2026-02-27 08:37:18.632550439 +0000 UTC m=+537.026952049" lastFinishedPulling="2026-02-27 08:37:23.281972898 +0000 UTC m=+541.676374508" observedRunningTime="2026-02-27 08:37:23.626355722 +0000 UTC m=+542.020757332" watchObservedRunningTime="2026-02-27 08:37:23.628168549 +0000 UTC m=+542.022570169" Feb 27 08:37:24 crc kubenswrapper[4906]: I0227 08:37:24.296456 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:24 crc kubenswrapper[4906]: I0227 08:37:24.297181 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:24 crc kubenswrapper[4906]: I0227 08:37:24.340405 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:24 crc 
kubenswrapper[4906]: I0227 08:37:24.481439 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:24 crc kubenswrapper[4906]: I0227 08:37:24.481521 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:24 crc kubenswrapper[4906]: I0227 08:37:24.546779 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:24 crc kubenswrapper[4906]: I0227 08:37:24.657427 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-6bqtn" Feb 27 08:37:24 crc kubenswrapper[4906]: I0227 08:37:24.666861 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-w5gxv" Feb 27 08:37:26 crc kubenswrapper[4906]: I0227 08:37:26.722013 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:26 crc kubenswrapper[4906]: I0227 08:37:26.722620 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:26 crc kubenswrapper[4906]: I0227 08:37:26.772932 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:26 crc kubenswrapper[4906]: I0227 08:37:26.940087 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:26 crc kubenswrapper[4906]: I0227 08:37:26.940170 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:27 crc kubenswrapper[4906]: I0227 08:37:27.666238 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nzkhk" Feb 27 08:37:27 crc kubenswrapper[4906]: I0227 08:37:27.979194 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-jsxjh" podUID="3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb" containerName="registry-server" probeResult="failure" output=< Feb 27 08:37:27 crc kubenswrapper[4906]: timeout: failed to connect service ":50051" within 1s Feb 27 08:37:27 crc kubenswrapper[4906]: > Feb 27 08:37:36 crc kubenswrapper[4906]: I0227 08:37:36.981307 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:37:37 crc kubenswrapper[4906]: I0227 08:37:37.027287 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jsxjh" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.161198 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536358-tb244"] Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.162702 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536358-tb244" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.166010 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.166166 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.166735 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.181690 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536358-tb244"] Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.212599 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrjqb\" (UniqueName: \"kubernetes.io/projected/8e8b22f9-fdf8-40a7-881e-fdcb521e7ece-kube-api-access-mrjqb\") pod \"auto-csr-approver-29536358-tb244\" (UID: \"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece\") " pod="openshift-infra/auto-csr-approver-29536358-tb244" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.313574 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrjqb\" (UniqueName: \"kubernetes.io/projected/8e8b22f9-fdf8-40a7-881e-fdcb521e7ece-kube-api-access-mrjqb\") pod \"auto-csr-approver-29536358-tb244\" (UID: \"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece\") " pod="openshift-infra/auto-csr-approver-29536358-tb244" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.335508 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrjqb\" (UniqueName: \"kubernetes.io/projected/8e8b22f9-fdf8-40a7-881e-fdcb521e7ece-kube-api-access-mrjqb\") pod \"auto-csr-approver-29536358-tb244\" (UID: \"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece\") " pod="openshift-infra/auto-csr-approver-29536358-tb244" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.490061 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536358-tb244" Feb 27 08:38:00 crc kubenswrapper[4906]: I0227 08:38:00.913233 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536358-tb244"] Feb 27 08:38:01 crc kubenswrapper[4906]: I0227 08:38:01.865069 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536358-tb244" event={"ID":"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece","Type":"ContainerStarted","Data":"a1fb3870144951d7a6b7bf2fb9a57a2c130ad21de45c1a455b1905dd92b14c87"} Feb 27 08:38:03 crc kubenswrapper[4906]: I0227 08:38:03.879099 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536358-tb244" event={"ID":"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece","Type":"ContainerStarted","Data":"6f9afb2763e4e7b5ed12d8fe21c2b4622f5745e9f65938cfc3f24772508abbdd"} Feb 27 08:38:03 crc kubenswrapper[4906]: I0227 08:38:03.903292 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536358-tb244" podStartSLOduration=1.296328101 podStartE2EDuration="3.903256743s" podCreationTimestamp="2026-02-27 08:38:00 +0000 UTC" firstStartedPulling="2026-02-27 08:38:00.927930269 +0000 UTC m=+579.322331879" lastFinishedPulling="2026-02-27 08:38:03.534858911 +0000 UTC m=+581.929260521" observedRunningTime="2026-02-27 08:38:03.897084451 +0000 UTC m=+582.291486061" watchObservedRunningTime="2026-02-27 08:38:03.903256743 +0000 UTC m=+582.297658393" Feb 27 08:38:04 crc kubenswrapper[4906]: I0227 08:38:04.890501 4906 generic.go:334] "Generic (PLEG): container finished" podID="8e8b22f9-fdf8-40a7-881e-fdcb521e7ece" containerID="6f9afb2763e4e7b5ed12d8fe21c2b4622f5745e9f65938cfc3f24772508abbdd" exitCode=0 Feb 27 08:38:04 crc kubenswrapper[4906]: I0227 08:38:04.890623 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536358-tb244" event={"ID":"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece","Type":"ContainerDied","Data":"6f9afb2763e4e7b5ed12d8fe21c2b4622f5745e9f65938cfc3f24772508abbdd"} Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.211765 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536358-tb244" Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.339552 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrjqb\" (UniqueName: \"kubernetes.io/projected/8e8b22f9-fdf8-40a7-881e-fdcb521e7ece-kube-api-access-mrjqb\") pod \"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece\" (UID: \"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece\") " Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.347208 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e8b22f9-fdf8-40a7-881e-fdcb521e7ece-kube-api-access-mrjqb" (OuterVolumeSpecName: "kube-api-access-mrjqb") pod "8e8b22f9-fdf8-40a7-881e-fdcb521e7ece" (UID: "8e8b22f9-fdf8-40a7-881e-fdcb521e7ece"). InnerVolumeSpecName "kube-api-access-mrjqb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.441907 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrjqb\" (UniqueName: \"kubernetes.io/projected/8e8b22f9-fdf8-40a7-881e-fdcb521e7ece-kube-api-access-mrjqb\") on node \"crc\" DevicePath \"\"" Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.909367 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536358-tb244" event={"ID":"8e8b22f9-fdf8-40a7-881e-fdcb521e7ece","Type":"ContainerDied","Data":"a1fb3870144951d7a6b7bf2fb9a57a2c130ad21de45c1a455b1905dd92b14c87"} Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.909430 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a1fb3870144951d7a6b7bf2fb9a57a2c130ad21de45c1a455b1905dd92b14c87" Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.909440 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536358-tb244" Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.976710 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536352-8fqhc"] Feb 27 08:38:06 crc kubenswrapper[4906]: I0227 08:38:06.980358 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536352-8fqhc"] Feb 27 08:38:08 crc kubenswrapper[4906]: I0227 08:38:08.562034 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="582fc06a-0d1d-4260-a91f-af317ab278d9" path="/var/lib/kubelet/pods/582fc06a-0d1d-4260-a91f-af317ab278d9/volumes" Feb 27 08:39:24 crc kubenswrapper[4906]: I0227 08:39:24.843935 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:39:24 crc kubenswrapper[4906]: I0227 08:39:24.844665 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:39:54 crc kubenswrapper[4906]: I0227 08:39:54.844214 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:39:54 crc kubenswrapper[4906]: I0227 08:39:54.845216 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.151264 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536360-pksm2"] Feb 27 08:40:00 crc kubenswrapper[4906]: E0227 08:40:00.151986 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e8b22f9-fdf8-40a7-881e-fdcb521e7ece" containerName="oc" Feb 27 08:40:00 crc 
kubenswrapper[4906]: I0227 08:40:00.152045 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e8b22f9-fdf8-40a7-881e-fdcb521e7ece" containerName="oc" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.152186 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e8b22f9-fdf8-40a7-881e-fdcb521e7ece" containerName="oc" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.152626 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536360-pksm2" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.155284 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.155645 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.155832 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.161392 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536360-pksm2"] Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.251854 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfdst\" (UniqueName: \"kubernetes.io/projected/fdb5a434-dd10-4c94-836b-3333e84775d7-kube-api-access-mfdst\") pod \"auto-csr-approver-29536360-pksm2\" (UID: \"fdb5a434-dd10-4c94-836b-3333e84775d7\") " pod="openshift-infra/auto-csr-approver-29536360-pksm2" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.353669 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfdst\" (UniqueName: \"kubernetes.io/projected/fdb5a434-dd10-4c94-836b-3333e84775d7-kube-api-access-mfdst\") pod \"auto-csr-approver-29536360-pksm2\" (UID: \"fdb5a434-dd10-4c94-836b-3333e84775d7\") " pod="openshift-infra/auto-csr-approver-29536360-pksm2" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.378143 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfdst\" (UniqueName: \"kubernetes.io/projected/fdb5a434-dd10-4c94-836b-3333e84775d7-kube-api-access-mfdst\") pod \"auto-csr-approver-29536360-pksm2\" (UID: \"fdb5a434-dd10-4c94-836b-3333e84775d7\") " pod="openshift-infra/auto-csr-approver-29536360-pksm2" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.482452 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536360-pksm2" Feb 27 08:40:00 crc kubenswrapper[4906]: I0227 08:40:00.701793 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536360-pksm2"] Feb 27 08:40:01 crc kubenswrapper[4906]: I0227 08:40:01.683980 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536360-pksm2" event={"ID":"fdb5a434-dd10-4c94-836b-3333e84775d7","Type":"ContainerStarted","Data":"2e440c5a5984bf860f05263ba4617361046d362d16b5f6931800e135aff988e1"} Feb 27 08:40:02 crc kubenswrapper[4906]: I0227 08:40:02.692524 4906 generic.go:334] "Generic (PLEG): container finished" podID="fdb5a434-dd10-4c94-836b-3333e84775d7" containerID="dd5f3adb691601d4137551a08699ae2905a09d9154300709a257b60df0bdbea9" exitCode=0 Feb 27 08:40:02 crc kubenswrapper[4906]: I0227 08:40:02.692609 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536360-pksm2" event={"ID":"fdb5a434-dd10-4c94-836b-3333e84775d7","Type":"ContainerDied","Data":"dd5f3adb691601d4137551a08699ae2905a09d9154300709a257b60df0bdbea9"} Feb 27 08:40:03 crc kubenswrapper[4906]: I0227 08:40:03.926446 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536360-pksm2" Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.016286 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfdst\" (UniqueName: \"kubernetes.io/projected/fdb5a434-dd10-4c94-836b-3333e84775d7-kube-api-access-mfdst\") pod \"fdb5a434-dd10-4c94-836b-3333e84775d7\" (UID: \"fdb5a434-dd10-4c94-836b-3333e84775d7\") " Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.029950 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdb5a434-dd10-4c94-836b-3333e84775d7-kube-api-access-mfdst" (OuterVolumeSpecName: "kube-api-access-mfdst") pod "fdb5a434-dd10-4c94-836b-3333e84775d7" (UID: "fdb5a434-dd10-4c94-836b-3333e84775d7"). InnerVolumeSpecName "kube-api-access-mfdst". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.118289 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfdst\" (UniqueName: \"kubernetes.io/projected/fdb5a434-dd10-4c94-836b-3333e84775d7-kube-api-access-mfdst\") on node \"crc\" DevicePath \"\"" Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.712710 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536360-pksm2" event={"ID":"fdb5a434-dd10-4c94-836b-3333e84775d7","Type":"ContainerDied","Data":"2e440c5a5984bf860f05263ba4617361046d362d16b5f6931800e135aff988e1"} Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.712775 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e440c5a5984bf860f05263ba4617361046d362d16b5f6931800e135aff988e1" Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.712784 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536360-pksm2" Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.995454 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536354-2w5js"] Feb 27 08:40:04 crc kubenswrapper[4906]: I0227 08:40:04.998934 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536354-2w5js"] Feb 27 08:40:06 crc kubenswrapper[4906]: I0227 08:40:06.568204 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="41d7dad5-7a02-4698-9385-0673efb99b6a" path="/var/lib/kubelet/pods/41d7dad5-7a02-4698-9385-0673efb99b6a/volumes" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.248945 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ltz5p"] Feb 27 08:40:16 crc kubenswrapper[4906]: E0227 08:40:16.250163 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdb5a434-dd10-4c94-836b-3333e84775d7" containerName="oc" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.250182 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdb5a434-dd10-4c94-836b-3333e84775d7" containerName="oc" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.250316 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdb5a434-dd10-4c94-836b-3333e84775d7" containerName="oc" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.250851 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.270114 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ltz5p"] Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.414629 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b416f313-e85a-4aa9-a54e-5717dc9cc367-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.414688 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b416f313-e85a-4aa9-a54e-5717dc9cc367-trusted-ca\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.414730 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b416f313-e85a-4aa9-a54e-5717dc9cc367-registry-certificates\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.414749 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-registry-tls\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc 
kubenswrapper[4906]: I0227 08:40:16.414777 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-42dp5\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-kube-api-access-42dp5\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.414807 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-bound-sa-token\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.414847 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.414871 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b416f313-e85a-4aa9-a54e-5717dc9cc367-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.437677 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.516593 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b416f313-e85a-4aa9-a54e-5717dc9cc367-registry-certificates\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.516656 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-registry-tls\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.516679 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-42dp5\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-kube-api-access-42dp5\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.516701 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-bound-sa-token\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.516740 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b416f313-e85a-4aa9-a54e-5717dc9cc367-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.516776 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b416f313-e85a-4aa9-a54e-5717dc9cc367-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.516800 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b416f313-e85a-4aa9-a54e-5717dc9cc367-trusted-ca\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.517707 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/b416f313-e85a-4aa9-a54e-5717dc9cc367-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.518140 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/b416f313-e85a-4aa9-a54e-5717dc9cc367-registry-certificates\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.519172 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b416f313-e85a-4aa9-a54e-5717dc9cc367-trusted-ca\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.524665 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/b416f313-e85a-4aa9-a54e-5717dc9cc367-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.525871 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-registry-tls\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.535066 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-bound-sa-token\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.537084 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-42dp5\" (UniqueName: \"kubernetes.io/projected/b416f313-e85a-4aa9-a54e-5717dc9cc367-kube-api-access-42dp5\") pod \"image-registry-66df7c8f76-ltz5p\" (UID: \"b416f313-e85a-4aa9-a54e-5717dc9cc367\") " pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.612778 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:16 crc kubenswrapper[4906]: I0227 08:40:16.817055 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ltz5p"] Feb 27 08:40:17 crc kubenswrapper[4906]: I0227 08:40:17.825965 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" event={"ID":"b416f313-e85a-4aa9-a54e-5717dc9cc367","Type":"ContainerStarted","Data":"14d916f7ef753efdc316e694c1b5de4a8597d7f4439c5d296f04494a545df118"} Feb 27 08:40:17 crc kubenswrapper[4906]: I0227 08:40:17.826296 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" event={"ID":"b416f313-e85a-4aa9-a54e-5717dc9cc367","Type":"ContainerStarted","Data":"c8c290e48419cea3a46bb04f49c57d3d71b37d679a62491f5bf35d502a0033a0"} Feb 27 08:40:17 crc kubenswrapper[4906]: I0227 08:40:17.826318 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:17 crc kubenswrapper[4906]: I0227 08:40:17.863395 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" podStartSLOduration=1.8633686699999998 podStartE2EDuration="1.86336867s" podCreationTimestamp="2026-02-27 08:40:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:40:17.859652044 +0000 UTC m=+716.254053674" watchObservedRunningTime="2026-02-27 08:40:17.86336867 +0000 UTC m=+716.257770280" Feb 27 08:40:24 crc kubenswrapper[4906]: I0227 08:40:24.844778 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:40:24 crc kubenswrapper[4906]: I0227 08:40:24.845696 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:40:24 crc kubenswrapper[4906]: I0227 08:40:24.845752 4906 kubelet.go:2542] 
"SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:40:24 crc kubenswrapper[4906]: I0227 08:40:24.846655 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5279e6ebbd53a2bdf9a1dc599b1baf42994b0cc910b5f1d6e07817732e7c99c7"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:40:24 crc kubenswrapper[4906]: I0227 08:40:24.846721 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://5279e6ebbd53a2bdf9a1dc599b1baf42994b0cc910b5f1d6e07817732e7c99c7" gracePeriod=600 Feb 27 08:40:25 crc kubenswrapper[4906]: I0227 08:40:25.881577 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="5279e6ebbd53a2bdf9a1dc599b1baf42994b0cc910b5f1d6e07817732e7c99c7" exitCode=0 Feb 27 08:40:25 crc kubenswrapper[4906]: I0227 08:40:25.881664 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"5279e6ebbd53a2bdf9a1dc599b1baf42994b0cc910b5f1d6e07817732e7c99c7"} Feb 27 08:40:25 crc kubenswrapper[4906]: I0227 08:40:25.882465 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"dd35a2232190406068e2b0898196cd8569373748f1f44babb52802f39e40a3ab"} Feb 27 08:40:25 crc kubenswrapper[4906]: I0227 08:40:25.882500 4906 scope.go:117] "RemoveContainer" containerID="432a2ec448d5afa76b89b67103131d632bfc1e942f1d1803d36030738d876711" Feb 27 08:40:36 crc kubenswrapper[4906]: I0227 08:40:36.619337 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-ltz5p" Feb 27 08:40:36 crc kubenswrapper[4906]: I0227 08:40:36.672777 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-km47t"] Feb 27 08:40:55 crc kubenswrapper[4906]: I0227 08:40:55.972764 4906 scope.go:117] "RemoveContainer" containerID="6708667a2cf57f1dee07a8612bd25d94517ec0a1b55cb53f9c9397cb3bc00ebe" Feb 27 08:40:56 crc kubenswrapper[4906]: I0227 08:40:56.004437 4906 scope.go:117] "RemoveContainer" containerID="03833bc81af18f8e05b37ffe1347f478a1b73a22faaff39a186c0c9246d4c1f0" Feb 27 08:40:56 crc kubenswrapper[4906]: I0227 08:40:56.080567 4906 scope.go:117] "RemoveContainer" containerID="726a8aa0c8e7c8d00a8188ed592f542bd801dddc049730f675f2fb4c83ee1125" Feb 27 08:41:01 crc kubenswrapper[4906]: I0227 08:41:01.721697 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" podUID="a7a26f83-d59b-4375-bcb0-89b52426dae7" containerName="registry" containerID="cri-o://b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948" gracePeriod=30 Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.106186 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.152133 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.152167 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" event={"ID":"a7a26f83-d59b-4375-bcb0-89b52426dae7","Type":"ContainerDied","Data":"b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948"} Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.152929 4906 generic.go:334] "Generic (PLEG): container finished" podID="a7a26f83-d59b-4375-bcb0-89b52426dae7" containerID="b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948" exitCode=0 Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.157514 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-km47t" event={"ID":"a7a26f83-d59b-4375-bcb0-89b52426dae7","Type":"ContainerDied","Data":"b3c4d00f92df2d4203d8930c6b443aa3fdc2b9ab062e6a8977f125ff9a142872"} Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.157617 4906 scope.go:117] "RemoveContainer" containerID="b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.181974 4906 scope.go:117] "RemoveContainer" containerID="b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948" Feb 27 08:41:02 crc kubenswrapper[4906]: E0227 08:41:02.182792 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948\": container with ID starting with b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948 not found: ID does not exist" containerID="b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.182838 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948"} err="failed to get container status \"b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948\": rpc error: code = NotFound desc = could not find container \"b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948\": container with ID starting with b1f08ed83565ec4e80bac7cbe1426b4eaa0a0bab42d37a6b5a34a52b99843948 not found: ID does not exist" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.208910 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7a26f83-d59b-4375-bcb0-89b52426dae7-installation-pull-secrets\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.208964 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-certificates\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.209254 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.209285 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-bound-sa-token\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.209330 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-trusted-ca\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.209361 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-tls\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.209432 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7a26f83-d59b-4375-bcb0-89b52426dae7-ca-trust-extracted\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.209449 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmxfn\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-kube-api-access-pmxfn\") pod \"a7a26f83-d59b-4375-bcb0-89b52426dae7\" (UID: \"a7a26f83-d59b-4375-bcb0-89b52426dae7\") " Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.212547 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.213723 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.217181 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.217302 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-kube-api-access-pmxfn" (OuterVolumeSpecName: "kube-api-access-pmxfn") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "kube-api-access-pmxfn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.217486 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7a26f83-d59b-4375-bcb0-89b52426dae7-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.217799 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.227413 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.233180 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7a26f83-d59b-4375-bcb0-89b52426dae7-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "a7a26f83-d59b-4375-bcb0-89b52426dae7" (UID: "a7a26f83-d59b-4375-bcb0-89b52426dae7"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.312808 4906 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a7a26f83-d59b-4375-bcb0-89b52426dae7-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.313118 4906 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.313199 4906 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.313317 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a7a26f83-d59b-4375-bcb0-89b52426dae7-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.313420 4906 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.313591 4906 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a7a26f83-d59b-4375-bcb0-89b52426dae7-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.313666 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmxfn\" (UniqueName: \"kubernetes.io/projected/a7a26f83-d59b-4375-bcb0-89b52426dae7-kube-api-access-pmxfn\") on node \"crc\" DevicePath \"\"" Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.493855 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-km47t"] Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.497088 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-km47t"] Feb 27 08:41:02 crc kubenswrapper[4906]: I0227 08:41:02.559843 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7a26f83-d59b-4375-bcb0-89b52426dae7" path="/var/lib/kubelet/pods/a7a26f83-d59b-4375-bcb0-89b52426dae7/volumes" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.146102 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536362-q8kgf"] Feb 27 08:42:00 crc kubenswrapper[4906]: E0227 08:42:00.149812 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7a26f83-d59b-4375-bcb0-89b52426dae7" containerName="registry" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.150055 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7a26f83-d59b-4375-bcb0-89b52426dae7" containerName="registry" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.150403 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7a26f83-d59b-4375-bcb0-89b52426dae7" containerName="registry" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.151323 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536362-q8kgf" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.154076 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.154305 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536362-q8kgf"] Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.154364 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.155164 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.198117 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvtgn\" (UniqueName: \"kubernetes.io/projected/decab1a7-e73d-43f1-bfd1-ac749354500a-kube-api-access-bvtgn\") pod \"auto-csr-approver-29536362-q8kgf\" (UID: \"decab1a7-e73d-43f1-bfd1-ac749354500a\") " pod="openshift-infra/auto-csr-approver-29536362-q8kgf" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.300080 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvtgn\" (UniqueName: \"kubernetes.io/projected/decab1a7-e73d-43f1-bfd1-ac749354500a-kube-api-access-bvtgn\") pod \"auto-csr-approver-29536362-q8kgf\" (UID: \"decab1a7-e73d-43f1-bfd1-ac749354500a\") " pod="openshift-infra/auto-csr-approver-29536362-q8kgf" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.321086 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvtgn\" (UniqueName: \"kubernetes.io/projected/decab1a7-e73d-43f1-bfd1-ac749354500a-kube-api-access-bvtgn\") pod \"auto-csr-approver-29536362-q8kgf\" (UID: \"decab1a7-e73d-43f1-bfd1-ac749354500a\") " pod="openshift-infra/auto-csr-approver-29536362-q8kgf" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.477398 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536362-q8kgf" Feb 27 08:42:00 crc kubenswrapper[4906]: I0227 08:42:00.663682 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536362-q8kgf"] Feb 27 08:42:01 crc kubenswrapper[4906]: I0227 08:42:01.577576 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536362-q8kgf" event={"ID":"decab1a7-e73d-43f1-bfd1-ac749354500a","Type":"ContainerStarted","Data":"22d36e4fd5880d8c6a47e405e1e7a84941b0c1f69845784a8962309513e48a52"} Feb 27 08:42:02 crc kubenswrapper[4906]: I0227 08:42:02.587066 4906 generic.go:334] "Generic (PLEG): container finished" podID="decab1a7-e73d-43f1-bfd1-ac749354500a" containerID="128b5cb607dca913cf1abfcf132d391f417fce5401683ad6b3e9ad3dd355ba6f" exitCode=0 Feb 27 08:42:02 crc kubenswrapper[4906]: I0227 08:42:02.587141 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536362-q8kgf" event={"ID":"decab1a7-e73d-43f1-bfd1-ac749354500a","Type":"ContainerDied","Data":"128b5cb607dca913cf1abfcf132d391f417fce5401683ad6b3e9ad3dd355ba6f"} Feb 27 08:42:03 crc kubenswrapper[4906]: I0227 08:42:03.819052 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536362-q8kgf" Feb 27 08:42:03 crc kubenswrapper[4906]: I0227 08:42:03.850042 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvtgn\" (UniqueName: \"kubernetes.io/projected/decab1a7-e73d-43f1-bfd1-ac749354500a-kube-api-access-bvtgn\") pod \"decab1a7-e73d-43f1-bfd1-ac749354500a\" (UID: \"decab1a7-e73d-43f1-bfd1-ac749354500a\") " Feb 27 08:42:03 crc kubenswrapper[4906]: I0227 08:42:03.858641 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/decab1a7-e73d-43f1-bfd1-ac749354500a-kube-api-access-bvtgn" (OuterVolumeSpecName: "kube-api-access-bvtgn") pod "decab1a7-e73d-43f1-bfd1-ac749354500a" (UID: "decab1a7-e73d-43f1-bfd1-ac749354500a"). InnerVolumeSpecName "kube-api-access-bvtgn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:42:03 crc kubenswrapper[4906]: I0227 08:42:03.952987 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvtgn\" (UniqueName: \"kubernetes.io/projected/decab1a7-e73d-43f1-bfd1-ac749354500a-kube-api-access-bvtgn\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:04 crc kubenswrapper[4906]: I0227 08:42:04.602063 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536362-q8kgf" event={"ID":"decab1a7-e73d-43f1-bfd1-ac749354500a","Type":"ContainerDied","Data":"22d36e4fd5880d8c6a47e405e1e7a84941b0c1f69845784a8962309513e48a52"} Feb 27 08:42:04 crc kubenswrapper[4906]: I0227 08:42:04.602630 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22d36e4fd5880d8c6a47e405e1e7a84941b0c1f69845784a8962309513e48a52" Feb 27 08:42:04 crc kubenswrapper[4906]: I0227 08:42:04.602368 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536362-q8kgf" Feb 27 08:42:04 crc kubenswrapper[4906]: I0227 08:42:04.905449 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536356-bbt2l"] Feb 27 08:42:04 crc kubenswrapper[4906]: I0227 08:42:04.910932 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536356-bbt2l"] Feb 27 08:42:06 crc kubenswrapper[4906]: I0227 08:42:06.560499 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3135b40-4120-49cf-9649-52a416ce5313" path="/var/lib/kubelet/pods/d3135b40-4120-49cf-9649-52a416ce5313/volumes" Feb 27 08:42:15 crc kubenswrapper[4906]: I0227 08:42:15.772784 4906 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.888961 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-fxldv"] Feb 27 08:42:48 crc kubenswrapper[4906]: E0227 08:42:48.890188 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="decab1a7-e73d-43f1-bfd1-ac749354500a" containerName="oc" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.890207 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="decab1a7-e73d-43f1-bfd1-ac749354500a" containerName="oc" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.890392 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="decab1a7-e73d-43f1-bfd1-ac749354500a" containerName="oc" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.890980 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.893119 4906 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-grkv9" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.893376 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.893493 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.898380 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-fxldv"] Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.943673 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-c6tfn"] Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.944628 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-c6tfn" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.950603 4906 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-6twbx" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.953955 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-q2shx"] Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.954942 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.957703 4906 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-4w7gk" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.973468 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-c6tfn"] Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.981257 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-q2shx"] Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.985287 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjpcx\" (UniqueName: \"kubernetes.io/projected/45013531-55f5-4b7c-88e3-71a927eaed69-kube-api-access-xjpcx\") pod \"cert-manager-858654f9db-c6tfn\" (UID: \"45013531-55f5-4b7c-88e3-71a927eaed69\") " pod="cert-manager/cert-manager-858654f9db-c6tfn" Feb 27 08:42:48 crc kubenswrapper[4906]: I0227 08:42:48.985345 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9qb6\" (UniqueName: \"kubernetes.io/projected/53526f83-4176-421a-9043-9a7839413714-kube-api-access-x9qb6\") pod \"cert-manager-cainjector-cf98fcc89-fxldv\" (UID: \"53526f83-4176-421a-9043-9a7839413714\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.086768 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n6rmp\" (UniqueName: \"kubernetes.io/projected/f98e1bf6-2c42-482c-9e2d-77f9fb0a572c-kube-api-access-n6rmp\") pod \"cert-manager-webhook-687f57d79b-q2shx\" (UID: \"f98e1bf6-2c42-482c-9e2d-77f9fb0a572c\") " pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.087295 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjpcx\" (UniqueName: \"kubernetes.io/projected/45013531-55f5-4b7c-88e3-71a927eaed69-kube-api-access-xjpcx\") pod \"cert-manager-858654f9db-c6tfn\" (UID: \"45013531-55f5-4b7c-88e3-71a927eaed69\") " pod="cert-manager/cert-manager-858654f9db-c6tfn" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.087427 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9qb6\" (UniqueName: \"kubernetes.io/projected/53526f83-4176-421a-9043-9a7839413714-kube-api-access-x9qb6\") pod \"cert-manager-cainjector-cf98fcc89-fxldv\" (UID: \"53526f83-4176-421a-9043-9a7839413714\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.111804 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9qb6\" (UniqueName: \"kubernetes.io/projected/53526f83-4176-421a-9043-9a7839413714-kube-api-access-x9qb6\") pod \"cert-manager-cainjector-cf98fcc89-fxldv\" (UID: \"53526f83-4176-421a-9043-9a7839413714\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.115666 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjpcx\" (UniqueName: \"kubernetes.io/projected/45013531-55f5-4b7c-88e3-71a927eaed69-kube-api-access-xjpcx\") pod \"cert-manager-858654f9db-c6tfn\" (UID: \"45013531-55f5-4b7c-88e3-71a927eaed69\") " 
pod="cert-manager/cert-manager-858654f9db-c6tfn" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.188322 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n6rmp\" (UniqueName: \"kubernetes.io/projected/f98e1bf6-2c42-482c-9e2d-77f9fb0a572c-kube-api-access-n6rmp\") pod \"cert-manager-webhook-687f57d79b-q2shx\" (UID: \"f98e1bf6-2c42-482c-9e2d-77f9fb0a572c\") " pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.209698 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n6rmp\" (UniqueName: \"kubernetes.io/projected/f98e1bf6-2c42-482c-9e2d-77f9fb0a572c-kube-api-access-n6rmp\") pod \"cert-manager-webhook-687f57d79b-q2shx\" (UID: \"f98e1bf6-2c42-482c-9e2d-77f9fb0a572c\") " pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.264657 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.282708 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-c6tfn" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.294251 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.507913 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-fxldv"] Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.518171 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.764014 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-q2shx"] Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.766087 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-c6tfn"] Feb 27 08:42:49 crc kubenswrapper[4906]: W0227 08:42:49.774255 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf98e1bf6_2c42_482c_9e2d_77f9fb0a572c.slice/crio-17b5df1ac400a29b9164d20a72adc715cd3fb37883a389c74918bf2f7eb858bc WatchSource:0}: Error finding container 17b5df1ac400a29b9164d20a72adc715cd3fb37883a389c74918bf2f7eb858bc: Status 404 returned error can't find the container with id 17b5df1ac400a29b9164d20a72adc715cd3fb37883a389c74918bf2f7eb858bc Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.990147 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-c6tfn" event={"ID":"45013531-55f5-4b7c-88e3-71a927eaed69","Type":"ContainerStarted","Data":"06f91f938634522c1869cf1ed6699d3b5cb8c4a16ac795f6c562b7c1510a7573"} Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.993486 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" event={"ID":"53526f83-4176-421a-9043-9a7839413714","Type":"ContainerStarted","Data":"e44a44cf9cfda19fd6986251335736aac809bf5c38199fdadd8344642d7a0c61"} Feb 27 08:42:49 crc kubenswrapper[4906]: I0227 08:42:49.994953 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" 
event={"ID":"f98e1bf6-2c42-482c-9e2d-77f9fb0a572c","Type":"ContainerStarted","Data":"17b5df1ac400a29b9164d20a72adc715cd3fb37883a389c74918bf2f7eb858bc"} Feb 27 08:42:53 crc kubenswrapper[4906]: I0227 08:42:53.016278 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" event={"ID":"53526f83-4176-421a-9043-9a7839413714","Type":"ContainerStarted","Data":"e2d2273191ce42849d79402f7d2f55cf34a2d340dfa68493e04f8dd2c37e7290"} Feb 27 08:42:53 crc kubenswrapper[4906]: I0227 08:42:53.040840 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-fxldv" podStartSLOduration=2.712532982 podStartE2EDuration="5.040812484s" podCreationTimestamp="2026-02-27 08:42:48 +0000 UTC" firstStartedPulling="2026-02-27 08:42:49.51793078 +0000 UTC m=+867.912332390" lastFinishedPulling="2026-02-27 08:42:51.846210282 +0000 UTC m=+870.240611892" observedRunningTime="2026-02-27 08:42:53.040589938 +0000 UTC m=+871.434991558" watchObservedRunningTime="2026-02-27 08:42:53.040812484 +0000 UTC m=+871.435214094" Feb 27 08:42:54 crc kubenswrapper[4906]: I0227 08:42:54.025011 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" event={"ID":"f98e1bf6-2c42-482c-9e2d-77f9fb0a572c","Type":"ContainerStarted","Data":"6657609cdd6f5ad1325245b11f97949c21448f4d12aa84fdc991e7be74fba110"} Feb 27 08:42:54 crc kubenswrapper[4906]: I0227 08:42:54.025456 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" Feb 27 08:42:54 crc kubenswrapper[4906]: I0227 08:42:54.030282 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-c6tfn" event={"ID":"45013531-55f5-4b7c-88e3-71a927eaed69","Type":"ContainerStarted","Data":"76699845c7911bea0ace4a13207237c3a409f5e4a3f5c40010dd30606a108b30"} Feb 27 08:42:54 crc kubenswrapper[4906]: I0227 08:42:54.045953 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" podStartSLOduration=2.343936385 podStartE2EDuration="6.04592683s" podCreationTimestamp="2026-02-27 08:42:48 +0000 UTC" firstStartedPulling="2026-02-27 08:42:49.778686755 +0000 UTC m=+868.173088365" lastFinishedPulling="2026-02-27 08:42:53.48067719 +0000 UTC m=+871.875078810" observedRunningTime="2026-02-27 08:42:54.041778258 +0000 UTC m=+872.436179888" watchObservedRunningTime="2026-02-27 08:42:54.04592683 +0000 UTC m=+872.440328450" Feb 27 08:42:54 crc kubenswrapper[4906]: I0227 08:42:54.069937 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-c6tfn" podStartSLOduration=2.330029672 podStartE2EDuration="6.069914562s" podCreationTimestamp="2026-02-27 08:42:48 +0000 UTC" firstStartedPulling="2026-02-27 08:42:49.787735768 +0000 UTC m=+868.182137378" lastFinishedPulling="2026-02-27 08:42:53.527620658 +0000 UTC m=+871.922022268" observedRunningTime="2026-02-27 08:42:54.064473168 +0000 UTC m=+872.458874788" watchObservedRunningTime="2026-02-27 08:42:54.069914562 +0000 UTC m=+872.464316172" Feb 27 08:42:54 crc kubenswrapper[4906]: I0227 08:42:54.844859 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
start-of-body= Feb 27 08:42:54 crc kubenswrapper[4906]: I0227 08:42:54.844958 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:42:56 crc kubenswrapper[4906]: I0227 08:42:56.210700 4906 scope.go:117] "RemoveContainer" containerID="6cd34b41613ac3c336c40aa2ee3bfa66a88070b4c468ea66a4d5249aafd5cfb5" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.045773 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lck5x"] Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.049715 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-controller" containerID="cri-o://adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.049789 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="sbdb" containerID="cri-o://5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.049852 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="nbdb" containerID="cri-o://a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.049973 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.049807 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="northd" containerID="cri-o://f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.050108 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-acl-logging" containerID="cri-o://4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.050101 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-node" containerID="cri-o://32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.103739 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" 
containerName="ovnkube-controller" containerID="cri-o://1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" gracePeriod=30 Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.297729 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-q2shx" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.389333 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/3.log" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.389915 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/1.log" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.392754 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.393678 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-controller/0.log" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.394404 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.437182 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-ovn-kubernetes\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.437276 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-env-overrides\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.437390 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.437706 4906 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.437994 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.454364 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-dxbrw"] Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.455052 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-acl-logging" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.455150 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-acl-logging" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.455227 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-acl-logging" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.455290 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-acl-logging" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.455359 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-node" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.455431 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-node" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.455502 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kubecfg-setup" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.455582 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kubecfg-setup" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.455650 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="sbdb" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.455712 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="sbdb" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.455777 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.455848 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.455960 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.456032 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.456103 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.456180 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.456253 4906 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="nbdb" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.456325 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="nbdb" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.456398 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-ovn-metrics" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.456467 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-ovn-metrics" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.456544 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.456613 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.456688 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="northd" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.456750 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="northd" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.456911 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.456991 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.457176 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-ovn-metrics" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.457257 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="sbdb" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.457587 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-acl-logging" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.457744 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.457934 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-acl-logging" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.458101 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovn-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.458269 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.458428 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.458590 4906 
memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.458749 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="northd" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.458935 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="nbdb" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.459112 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="kube-rbac-proxy-node" Feb 27 08:42:59 crc kubenswrapper[4906]: E0227 08:42:59.459417 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.459587 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.459866 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerName="ovnkube-controller" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.462073 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538749 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-kubelet\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538800 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-config\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538828 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-script-lib\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538843 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-systemd-units\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538860 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovn-node-metrics-cert\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538890 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: 
\"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-slash\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538904 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538917 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-systemd\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538960 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-ovn\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.538984 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-etc-openvswitch\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539012 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-bin\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539039 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-netns\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539065 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l5jtf\" (UniqueName: \"kubernetes.io/projected/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-kube-api-access-l5jtf\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539091 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539115 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-var-lib-openvswitch\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539137 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-log-socket\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539164 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-var-lib-cni-networks-ovn-kubernetes\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539187 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-node-log\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539213 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-openvswitch\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539234 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-netd\") pod \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\" (UID: \"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2\") " Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539333 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-cni-netd\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539360 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-run-netns\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539413 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539416 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-run-ovn-kubernetes\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539412 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-slash" (OuterVolumeSpecName: "host-slash") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539460 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539469 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-node-log\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539497 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-node-log" (OuterVolumeSpecName: "node-log") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539503 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-systemd-units\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539526 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-env-overrides\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539553 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-cni-bin\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539501 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539527 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539539 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539540 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539551 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539539 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-log-socket" (OuterVolumeSpecName: "log-socket") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539578 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539588 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539760 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-var-lib-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539941 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x9hm\" (UniqueName: \"kubernetes.io/projected/d1384200-b85e-4e35-a8a8-4fa5d0eed495-kube-api-access-6x9hm\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.539967 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540075 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540122 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovnkube-config\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540150 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-slash\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540177 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-systemd\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540329 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-log-socket\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540384 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-ovn\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540412 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovn-node-metrics-cert\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540496 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540518 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovnkube-script-lib\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540553 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-kubelet\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540591 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-etc-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540793 4906 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540812 4906 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540840 4906 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540852 4906 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540865 4906 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-slash\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540891 4906 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540903 4906 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540918 4906 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540927 4906 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-run-netns\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540938 4906 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540948 4906 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-log-socket\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540961 4906 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540974 4906 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-node-log\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540983 4906 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.540993 4906 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.541001 4906 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.546299 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.547115 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-kube-api-access-l5jtf" (OuterVolumeSpecName: "kube-api-access-l5jtf") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "kube-api-access-l5jtf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.557948 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" (UID: "0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642425 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-log-socket\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642518 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-ovn\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642590 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovn-node-metrics-cert\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642609 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-log-socket\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642637 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovnkube-script-lib\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642736 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642773 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-kubelet\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642810 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-etc-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642842 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-cni-netd\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 
08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642856 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642871 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-run-netns\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642918 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-kubelet\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642945 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-run-ovn-kubernetes\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642968 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-run-netns\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642973 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-cni-netd\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.642934 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-etc-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643011 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-run-ovn-kubernetes\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643021 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-node-log\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643048 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-node-log\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643121 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-systemd-units\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643149 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-env-overrides\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643218 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-cni-bin\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643318 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-var-lib-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643348 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x9hm\" (UniqueName: \"kubernetes.io/projected/d1384200-b85e-4e35-a8a8-4fa5d0eed495-kube-api-access-6x9hm\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643394 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-systemd-units\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643480 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643416 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643583 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-systemd\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643621 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovnkube-config\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643669 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-slash\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643685 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-systemd\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643824 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-slash\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643833 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-host-cni-bin\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643866 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-var-lib-openvswitch\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643911 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l5jtf\" (UniqueName: \"kubernetes.io/projected/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-kube-api-access-l5jtf\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643933 4906 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643941 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/d1384200-b85e-4e35-a8a8-4fa5d0eed495-run-ovn\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.643946 4906 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: 
\"kubernetes.io/host-path/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.644119 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovnkube-script-lib\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.644591 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovnkube-config\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.644664 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/d1384200-b85e-4e35-a8a8-4fa5d0eed495-env-overrides\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.646432 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/d1384200-b85e-4e35-a8a8-4fa5d0eed495-ovn-node-metrics-cert\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.661668 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x9hm\" (UniqueName: \"kubernetes.io/projected/d1384200-b85e-4e35-a8a8-4fa5d0eed495-kube-api-access-6x9hm\") pod \"ovnkube-node-dxbrw\" (UID: \"d1384200-b85e-4e35-a8a8-4fa5d0eed495\") " pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:42:59 crc kubenswrapper[4906]: I0227 08:42:59.777601 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.082988 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovnkube-controller/3.log" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.084527 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/1.log" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.087110 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-acl-logging/0.log" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.087647 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-lck5x_0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/ovn-controller/0.log" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088149 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" exitCode=0 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088177 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" exitCode=143 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088186 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" exitCode=0 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088195 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" exitCode=0 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088203 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" exitCode=0 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088211 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" exitCode=0 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088219 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887" exitCode=0 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088227 4906 generic.go:334] "Generic (PLEG): container finished" podID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" containerID="adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526" exitCode=143 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088285 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088319 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" 
event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088332 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088344 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088356 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088369 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088380 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088386 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088392 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088400 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088397 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088422 4906 scope.go:117] "RemoveContainer" containerID="1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088409 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088587 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088596 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088602 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088607 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088614 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088620 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088625 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088631 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088636 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088642 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088650 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088660 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088666 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088671 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088677 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088682 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088688 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088700 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088705 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088711 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088716 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088722 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088728 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088736 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088743 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088748 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088754 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088759 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088764 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088769 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088775 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088780 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088785 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088790 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088796 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-lck5x" event={"ID":"0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2","Type":"ContainerDied","Data":"5f72107f76721706132c879200ae447ff3d514374d272267af6fc87de4400532"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088805 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088812 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088819 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088824 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088849 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088854 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088860 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088866 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088871 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088895 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.088902 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.092352 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/2.log" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.092744 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/1.log" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.092778 4906 generic.go:334] "Generic (PLEG): container finished" podID="a961de01-e505-4c80-96a0-333da958a633" containerID="97b39484f9fe72c1088867368903db2cbdc36bc29698fe7c9c901230d5e6bf42" exitCode=2 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.092852 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerDied","Data":"97b39484f9fe72c1088867368903db2cbdc36bc29698fe7c9c901230d5e6bf42"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.092872 4906 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.093663 4906 scope.go:117] "RemoveContainer" containerID="97b39484f9fe72c1088867368903db2cbdc36bc29698fe7c9c901230d5e6bf42" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.096617 4906 generic.go:334] "Generic (PLEG): container finished" podID="d1384200-b85e-4e35-a8a8-4fa5d0eed495" containerID="0ff7284c2296953ea8fc9a9703e45ed74d5fac76f565b0ee06b4abd78c408d77" exitCode=0 Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.096670 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" 
event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerDied","Data":"0ff7284c2296953ea8fc9a9703e45ed74d5fac76f565b0ee06b4abd78c408d77"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.096709 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"a73c65763df1b739e75e64bdb7cf0d544cdd90ba169c76a1c6a87fd14ef30b44"} Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.107953 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.136547 4906 scope.go:117] "RemoveContainer" containerID="4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.143850 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lck5x"] Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.150370 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-lck5x"] Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.169124 4906 scope.go:117] "RemoveContainer" containerID="5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.193852 4906 scope.go:117] "RemoveContainer" containerID="a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.254682 4906 scope.go:117] "RemoveContainer" containerID="f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.270350 4906 scope.go:117] "RemoveContainer" containerID="210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.292018 4906 scope.go:117] "RemoveContainer" containerID="32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.306310 4906 scope.go:117] "RemoveContainer" containerID="78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.325219 4906 scope.go:117] "RemoveContainer" containerID="adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.346537 4906 scope.go:117] "RemoveContainer" containerID="ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.369355 4906 scope.go:117] "RemoveContainer" containerID="1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.370097 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": container with ID starting with 1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681 not found: ID does not exist" containerID="1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.370144 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} err="failed to get container status 
\"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": rpc error: code = NotFound desc = could not find container \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": container with ID starting with 1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.370181 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.370500 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": container with ID starting with bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18 not found: ID does not exist" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.370526 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} err="failed to get container status \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": rpc error: code = NotFound desc = could not find container \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": container with ID starting with bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.370540 4906 scope.go:117] "RemoveContainer" containerID="4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.370798 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": container with ID starting with 4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28 not found: ID does not exist" containerID="4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.370824 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} err="failed to get container status \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": rpc error: code = NotFound desc = could not find container \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": container with ID starting with 4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.370841 4906 scope.go:117] "RemoveContainer" containerID="5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.371460 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": container with ID starting with 5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c not found: ID does not exist" containerID="5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.371489 4906 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} err="failed to get container status \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": rpc error: code = NotFound desc = could not find container \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": container with ID starting with 5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.371509 4906 scope.go:117] "RemoveContainer" containerID="a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.371895 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": container with ID starting with a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7 not found: ID does not exist" containerID="a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.371923 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} err="failed to get container status \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": rpc error: code = NotFound desc = could not find container \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": container with ID starting with a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.371941 4906 scope.go:117] "RemoveContainer" containerID="f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.372397 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": container with ID starting with f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f not found: ID does not exist" containerID="f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.372428 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} err="failed to get container status \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": rpc error: code = NotFound desc = could not find container \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": container with ID starting with f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.372449 4906 scope.go:117] "RemoveContainer" containerID="210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.373109 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": container with ID starting with 210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c not found: ID does not exist" 
containerID="210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.373174 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} err="failed to get container status \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": rpc error: code = NotFound desc = could not find container \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": container with ID starting with 210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.373193 4906 scope.go:117] "RemoveContainer" containerID="32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.373540 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\": container with ID starting with 32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887 not found: ID does not exist" containerID="32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.373565 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} err="failed to get container status \"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\": rpc error: code = NotFound desc = could not find container \"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\": container with ID starting with 32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.373581 4906 scope.go:117] "RemoveContainer" containerID="78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.373984 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\": container with ID starting with 78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae not found: ID does not exist" containerID="78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.374006 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} err="failed to get container status \"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\": rpc error: code = NotFound desc = could not find container \"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\": container with ID starting with 78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.374018 4906 scope.go:117] "RemoveContainer" containerID="adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.374480 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\": container with ID starting with adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526 not found: ID does not exist" containerID="adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.374519 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} err="failed to get container status \"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\": rpc error: code = NotFound desc = could not find container \"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\": container with ID starting with adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.374536 4906 scope.go:117] "RemoveContainer" containerID="ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88" Feb 27 08:43:00 crc kubenswrapper[4906]: E0227 08:43:00.374820 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\": container with ID starting with ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88 not found: ID does not exist" containerID="ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.374851 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} err="failed to get container status \"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\": rpc error: code = NotFound desc = could not find container \"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\": container with ID starting with ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.374867 4906 scope.go:117] "RemoveContainer" containerID="1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.375304 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} err="failed to get container status \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": rpc error: code = NotFound desc = could not find container \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": container with ID starting with 1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.375332 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.375618 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} err="failed to get container status \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": rpc error: code = NotFound desc = could not find container \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": container with ID starting with 
bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.375640 4906 scope.go:117] "RemoveContainer" containerID="4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.377097 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} err="failed to get container status \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": rpc error: code = NotFound desc = could not find container \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": container with ID starting with 4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.377151 4906 scope.go:117] "RemoveContainer" containerID="5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.377539 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} err="failed to get container status \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": rpc error: code = NotFound desc = could not find container \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": container with ID starting with 5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.377588 4906 scope.go:117] "RemoveContainer" containerID="a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.378126 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} err="failed to get container status \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": rpc error: code = NotFound desc = could not find container \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": container with ID starting with a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.378168 4906 scope.go:117] "RemoveContainer" containerID="f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.378554 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} err="failed to get container status \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": rpc error: code = NotFound desc = could not find container \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": container with ID starting with f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.378587 4906 scope.go:117] "RemoveContainer" containerID="210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.378859 4906 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} err="failed to get container status \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": rpc error: code = NotFound desc = could not find container \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": container with ID starting with 210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.378930 4906 scope.go:117] "RemoveContainer" containerID="32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.379390 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} err="failed to get container status \"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\": rpc error: code = NotFound desc = could not find container \"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\": container with ID starting with 32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.379413 4906 scope.go:117] "RemoveContainer" containerID="78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.379639 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} err="failed to get container status \"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\": rpc error: code = NotFound desc = could not find container \"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\": container with ID starting with 78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.379661 4906 scope.go:117] "RemoveContainer" containerID="adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.379993 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} err="failed to get container status \"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\": rpc error: code = NotFound desc = could not find container \"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\": container with ID starting with adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.380045 4906 scope.go:117] "RemoveContainer" containerID="ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.381809 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} err="failed to get container status \"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\": rpc error: code = NotFound desc = could not find container \"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\": container with ID starting with ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88 not found: ID does not exist" Feb 
27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.381831 4906 scope.go:117] "RemoveContainer" containerID="1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.382271 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} err="failed to get container status \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": rpc error: code = NotFound desc = could not find container \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": container with ID starting with 1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.382293 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.384607 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} err="failed to get container status \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": rpc error: code = NotFound desc = could not find container \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": container with ID starting with bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.384629 4906 scope.go:117] "RemoveContainer" containerID="4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.385193 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} err="failed to get container status \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": rpc error: code = NotFound desc = could not find container \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": container with ID starting with 4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.385212 4906 scope.go:117] "RemoveContainer" containerID="5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.385595 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} err="failed to get container status \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": rpc error: code = NotFound desc = could not find container \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": container with ID starting with 5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.385614 4906 scope.go:117] "RemoveContainer" containerID="a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.386250 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} err="failed to get container status 
\"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": rpc error: code = NotFound desc = could not find container \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": container with ID starting with a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.386269 4906 scope.go:117] "RemoveContainer" containerID="f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.386712 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} err="failed to get container status \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": rpc error: code = NotFound desc = could not find container \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": container with ID starting with f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.386846 4906 scope.go:117] "RemoveContainer" containerID="210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.387308 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} err="failed to get container status \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": rpc error: code = NotFound desc = could not find container \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": container with ID starting with 210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.387339 4906 scope.go:117] "RemoveContainer" containerID="32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.387988 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887"} err="failed to get container status \"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\": rpc error: code = NotFound desc = could not find container \"32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887\": container with ID starting with 32b9479995aeb56689b770a4b13fe992aa37786df4bc4954991a26c81ca2e887 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.388010 4906 scope.go:117] "RemoveContainer" containerID="78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.388339 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae"} err="failed to get container status \"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\": rpc error: code = NotFound desc = could not find container \"78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae\": container with ID starting with 78c89aa2974bf9b8688e94d277b0569385cab3e1cd4e351b28ce8f13d00fd3ae not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.388362 4906 scope.go:117] "RemoveContainer" 
containerID="adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.388731 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526"} err="failed to get container status \"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\": rpc error: code = NotFound desc = could not find container \"adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526\": container with ID starting with adf4f799f9fb5ae8d5cc8b46534918dbd5bc707e61bd9996bd77e88009bee526 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.388753 4906 scope.go:117] "RemoveContainer" containerID="ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.389562 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88"} err="failed to get container status \"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\": rpc error: code = NotFound desc = could not find container \"ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88\": container with ID starting with ef282c9c725d7f69e2e8085ee22ed8b31fc3dc0b619313a4a3349c42b5661e88 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.389601 4906 scope.go:117] "RemoveContainer" containerID="1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.390030 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681"} err="failed to get container status \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": rpc error: code = NotFound desc = could not find container \"1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681\": container with ID starting with 1bf0883a41855a976b43ac7ba94916b8a970e07e5eba073befa821b6693dc681 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.390110 4906 scope.go:117] "RemoveContainer" containerID="bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.390489 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18"} err="failed to get container status \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": rpc error: code = NotFound desc = could not find container \"bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18\": container with ID starting with bf363210098980a592d37faeebffe81c7e65c49b88afd0f391fe8ba2a2b77d18 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.390511 4906 scope.go:117] "RemoveContainer" containerID="4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.390816 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28"} err="failed to get container status \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": rpc error: code = NotFound desc = could not find 
container \"4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28\": container with ID starting with 4bd10e45bc60c58ac138c80fb7e7dd9951922d05825968b312ac3f17478b3a28 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.390834 4906 scope.go:117] "RemoveContainer" containerID="5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.391210 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c"} err="failed to get container status \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": rpc error: code = NotFound desc = could not find container \"5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c\": container with ID starting with 5f1612bb353a7e4c6f4060774071acd3facfee2d1fc4a75d694a1d182500d57c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.391236 4906 scope.go:117] "RemoveContainer" containerID="a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.391720 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7"} err="failed to get container status \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": rpc error: code = NotFound desc = could not find container \"a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7\": container with ID starting with a6c69dc5035d5d1f2e919cf42bd532914c68c5698d3935a3e079706a38f227e7 not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.391740 4906 scope.go:117] "RemoveContainer" containerID="f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.392031 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f"} err="failed to get container status \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": rpc error: code = NotFound desc = could not find container \"f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f\": container with ID starting with f9a8ec65b0f3776d693eb9a28ad901bc6e212d8d66e6f7eaf88756d0ab53d70f not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.392054 4906 scope.go:117] "RemoveContainer" containerID="210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.392596 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c"} err="failed to get container status \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": rpc error: code = NotFound desc = could not find container \"210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c\": container with ID starting with 210b9bc5cbb170738eb8f629db97c1204b7137e5384c67a964aa5478ae4bd84c not found: ID does not exist" Feb 27 08:43:00 crc kubenswrapper[4906]: I0227 08:43:00.562491 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2" path="/var/lib/kubelet/pods/0338a8dd-b2b2-44b7-a7a2-4a2a5b7a23f2/volumes" Feb 27 
08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.107934 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"c72c2b94f70e6d79775735ed98d1a59e53009c557b3f9bd73b3dd2776bd9055f"} Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.108003 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"c64430838408c881e5efffb9d6a851313aa857628be0b68edf7272918f69123c"} Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.108022 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"f28a3919784c3e26d8594eb0a9fc2e62853cafcd16f095ac2f782ddbbf8538f6"} Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.108036 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"d1cfc6b14f51c0c6e870b0e7855150e572dc1a8c375b4cb5efe46ad24c045c0c"} Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.108049 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"9549cdaa7cd5c755ae26bb8bdb40f9ff58f8294e0ea865a6a1a870a8352add8d"} Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.108062 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"7a09d6b2914dbc76e28ff3823e1d225cd5335914d983f2cbff48b0e4579e7434"} Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.113150 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/2.log" Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.113902 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/1.log" Feb 27 08:43:01 crc kubenswrapper[4906]: I0227 08:43:01.113940 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-6nxxh" event={"ID":"a961de01-e505-4c80-96a0-333da958a633","Type":"ContainerStarted","Data":"4d2b4372ca141d7c4acc3a1a3ee7ae0c768096c78cccd9f98af818849d973c32"} Feb 27 08:43:04 crc kubenswrapper[4906]: I0227 08:43:04.155402 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"ef0a28844e5fae4e43e8057ba24d99953adacfd97039dc4a76a5d82411517a5e"} Feb 27 08:43:06 crc kubenswrapper[4906]: I0227 08:43:06.170033 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" event={"ID":"d1384200-b85e-4e35-a8a8-4fa5d0eed495","Type":"ContainerStarted","Data":"9a3fd34d9c58f461cf954591a387bc6dc59a24bf4cb56792a8418768823820fb"} Feb 27 08:43:06 crc kubenswrapper[4906]: I0227 08:43:06.172824 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:43:06 crc kubenswrapper[4906]: I0227 08:43:06.172849 4906 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:43:06 crc kubenswrapper[4906]: I0227 08:43:06.172866 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:43:06 crc kubenswrapper[4906]: I0227 08:43:06.202410 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:43:06 crc kubenswrapper[4906]: I0227 08:43:06.203988 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:43:06 crc kubenswrapper[4906]: I0227 08:43:06.206468 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" podStartSLOduration=7.206435888 podStartE2EDuration="7.206435888s" podCreationTimestamp="2026-02-27 08:42:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:43:06.197223291 +0000 UTC m=+884.591624901" watchObservedRunningTime="2026-02-27 08:43:06.206435888 +0000 UTC m=+884.600837498" Feb 27 08:43:24 crc kubenswrapper[4906]: I0227 08:43:24.845089 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:43:24 crc kubenswrapper[4906]: I0227 08:43:24.846361 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:43:29 crc kubenswrapper[4906]: I0227 08:43:29.819834 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-dxbrw" Feb 27 08:43:35 crc kubenswrapper[4906]: I0227 08:43:35.932735 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg"] Feb 27 08:43:35 crc kubenswrapper[4906]: I0227 08:43:35.934728 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:35 crc kubenswrapper[4906]: I0227 08:43:35.937351 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 27 08:43:35 crc kubenswrapper[4906]: I0227 08:43:35.944510 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg"] Feb 27 08:43:35 crc kubenswrapper[4906]: I0227 08:43:35.996581 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wfjr\" (UniqueName: \"kubernetes.io/projected/4be2276e-319a-41b2-afe6-40b807bb398a-kube-api-access-6wfjr\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:35 crc kubenswrapper[4906]: I0227 08:43:35.996664 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-util\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:35 crc kubenswrapper[4906]: I0227 08:43:35.996698 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-bundle\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:36 crc kubenswrapper[4906]: I0227 08:43:36.098242 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wfjr\" (UniqueName: \"kubernetes.io/projected/4be2276e-319a-41b2-afe6-40b807bb398a-kube-api-access-6wfjr\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:36 crc kubenswrapper[4906]: I0227 08:43:36.098614 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-util\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:36 crc kubenswrapper[4906]: I0227 08:43:36.098743 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-bundle\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:36 crc kubenswrapper[4906]: I0227 08:43:36.099290 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-util\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:36 crc kubenswrapper[4906]: I0227 08:43:36.099290 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-bundle\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.045933 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wfjr\" (UniqueName: \"kubernetes.io/projected/4be2276e-319a-41b2-afe6-40b807bb398a-kube-api-access-6wfjr\") pod \"0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.153576 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.435971 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg"] Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.741645 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zf64x"] Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.743804 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.761085 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zf64x"] Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.913551 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-catalog-content\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.913628 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-utilities\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:37 crc kubenswrapper[4906]: I0227 08:43:37.913655 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4k89k\" (UniqueName: \"kubernetes.io/projected/27bb9122-6cee-4f82-af66-125124a0a2ad-kube-api-access-4k89k\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.015563 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4k89k\" (UniqueName: \"kubernetes.io/projected/27bb9122-6cee-4f82-af66-125124a0a2ad-kube-api-access-4k89k\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.015685 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-catalog-content\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.015746 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-utilities\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.016484 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-utilities\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.016524 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-catalog-content\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.028812 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" event={"ID":"4be2276e-319a-41b2-afe6-40b807bb398a","Type":"ContainerStarted","Data":"9ee615dc1bc352db1962ceadefce55d036418c24d7555e0a5285a869b7be6172"} Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.028947 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" event={"ID":"4be2276e-319a-41b2-afe6-40b807bb398a","Type":"ContainerStarted","Data":"04fe71c3841124efe31b736c1da6f7e94b6bdf68cc5b6443bc52c97390a87a10"} Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.036443 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4k89k\" (UniqueName: \"kubernetes.io/projected/27bb9122-6cee-4f82-af66-125124a0a2ad-kube-api-access-4k89k\") pod \"redhat-operators-zf64x\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.069717 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:38 crc kubenswrapper[4906]: I0227 08:43:38.284040 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zf64x"] Feb 27 08:43:39 crc kubenswrapper[4906]: I0227 08:43:39.036980 4906 generic.go:334] "Generic (PLEG): container finished" podID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerID="548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d" exitCode=0 Feb 27 08:43:39 crc kubenswrapper[4906]: I0227 08:43:39.037102 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zf64x" event={"ID":"27bb9122-6cee-4f82-af66-125124a0a2ad","Type":"ContainerDied","Data":"548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d"} Feb 27 08:43:39 crc kubenswrapper[4906]: I0227 08:43:39.037186 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zf64x" event={"ID":"27bb9122-6cee-4f82-af66-125124a0a2ad","Type":"ContainerStarted","Data":"2a3f535f9f2f5a021c528803d05cf2d68982a2f2587340073f4cfb5ca9b9f638"} Feb 27 08:43:39 crc kubenswrapper[4906]: I0227 08:43:39.038997 4906 generic.go:334] "Generic (PLEG): container finished" podID="4be2276e-319a-41b2-afe6-40b807bb398a" containerID="9ee615dc1bc352db1962ceadefce55d036418c24d7555e0a5285a869b7be6172" exitCode=0 Feb 27 08:43:39 crc kubenswrapper[4906]: I0227 08:43:39.039045 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" event={"ID":"4be2276e-319a-41b2-afe6-40b807bb398a","Type":"ContainerDied","Data":"9ee615dc1bc352db1962ceadefce55d036418c24d7555e0a5285a869b7be6172"} Feb 27 08:43:41 crc kubenswrapper[4906]: I0227 08:43:41.058552 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zf64x" event={"ID":"27bb9122-6cee-4f82-af66-125124a0a2ad","Type":"ContainerStarted","Data":"75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe"} Feb 27 08:43:42 crc kubenswrapper[4906]: I0227 08:43:42.069258 4906 generic.go:334] "Generic (PLEG): container finished" podID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerID="75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe" exitCode=0 Feb 27 08:43:42 crc kubenswrapper[4906]: I0227 08:43:42.069368 4906 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/redhat-operators-zf64x" event={"ID":"27bb9122-6cee-4f82-af66-125124a0a2ad","Type":"ContainerDied","Data":"75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe"} Feb 27 08:43:42 crc kubenswrapper[4906]: I0227 08:43:42.076037 4906 generic.go:334] "Generic (PLEG): container finished" podID="4be2276e-319a-41b2-afe6-40b807bb398a" containerID="9d0d34d4c0db0a92ef41d0e841fb20cfe7c0be180cb698e74fb9f55f451b0ac8" exitCode=0 Feb 27 08:43:42 crc kubenswrapper[4906]: I0227 08:43:42.076089 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" event={"ID":"4be2276e-319a-41b2-afe6-40b807bb398a","Type":"ContainerDied","Data":"9d0d34d4c0db0a92ef41d0e841fb20cfe7c0be180cb698e74fb9f55f451b0ac8"} Feb 27 08:43:43 crc kubenswrapper[4906]: I0227 08:43:43.084624 4906 generic.go:334] "Generic (PLEG): container finished" podID="4be2276e-319a-41b2-afe6-40b807bb398a" containerID="55c91b77d3a31f56616708c9659c6c234c92892168041ad8580885f510c65167" exitCode=0 Feb 27 08:43:43 crc kubenswrapper[4906]: I0227 08:43:43.084690 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" event={"ID":"4be2276e-319a-41b2-afe6-40b807bb398a","Type":"ContainerDied","Data":"55c91b77d3a31f56616708c9659c6c234c92892168041ad8580885f510c65167"} Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.095838 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zf64x" event={"ID":"27bb9122-6cee-4f82-af66-125124a0a2ad","Type":"ContainerStarted","Data":"c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616"} Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.119304 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zf64x" podStartSLOduration=3.280336123 podStartE2EDuration="7.119272108s" podCreationTimestamp="2026-02-27 08:43:37 +0000 UTC" firstStartedPulling="2026-02-27 08:43:39.038692366 +0000 UTC m=+917.433093976" lastFinishedPulling="2026-02-27 08:43:42.877628301 +0000 UTC m=+921.272029961" observedRunningTime="2026-02-27 08:43:44.113604949 +0000 UTC m=+922.508006559" watchObservedRunningTime="2026-02-27 08:43:44.119272108 +0000 UTC m=+922.513673718" Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.332498 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.523722 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-util\") pod \"4be2276e-319a-41b2-afe6-40b807bb398a\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.524163 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-bundle\") pod \"4be2276e-319a-41b2-afe6-40b807bb398a\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.524293 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wfjr\" (UniqueName: \"kubernetes.io/projected/4be2276e-319a-41b2-afe6-40b807bb398a-kube-api-access-6wfjr\") pod \"4be2276e-319a-41b2-afe6-40b807bb398a\" (UID: \"4be2276e-319a-41b2-afe6-40b807bb398a\") " Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.525108 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-bundle" (OuterVolumeSpecName: "bundle") pod "4be2276e-319a-41b2-afe6-40b807bb398a" (UID: "4be2276e-319a-41b2-afe6-40b807bb398a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.535434 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-util" (OuterVolumeSpecName: "util") pod "4be2276e-319a-41b2-afe6-40b807bb398a" (UID: "4be2276e-319a-41b2-afe6-40b807bb398a"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.545871 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4be2276e-319a-41b2-afe6-40b807bb398a-kube-api-access-6wfjr" (OuterVolumeSpecName: "kube-api-access-6wfjr") pod "4be2276e-319a-41b2-afe6-40b807bb398a" (UID: "4be2276e-319a-41b2-afe6-40b807bb398a"). InnerVolumeSpecName "kube-api-access-6wfjr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.626563 4906 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.626613 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wfjr\" (UniqueName: \"kubernetes.io/projected/4be2276e-319a-41b2-afe6-40b807bb398a-kube-api-access-6wfjr\") on node \"crc\" DevicePath \"\"" Feb 27 08:43:44 crc kubenswrapper[4906]: I0227 08:43:44.626626 4906 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4be2276e-319a-41b2-afe6-40b807bb398a-util\") on node \"crc\" DevicePath \"\"" Feb 27 08:43:45 crc kubenswrapper[4906]: I0227 08:43:45.107704 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" Feb 27 08:43:45 crc kubenswrapper[4906]: I0227 08:43:45.107784 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg" event={"ID":"4be2276e-319a-41b2-afe6-40b807bb398a","Type":"ContainerDied","Data":"04fe71c3841124efe31b736c1da6f7e94b6bdf68cc5b6443bc52c97390a87a10"} Feb 27 08:43:45 crc kubenswrapper[4906]: I0227 08:43:45.107824 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04fe71c3841124efe31b736c1da6f7e94b6bdf68cc5b6443bc52c97390a87a10" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.509184 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr"] Feb 27 08:43:46 crc kubenswrapper[4906]: E0227 08:43:46.510257 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be2276e-319a-41b2-afe6-40b807bb398a" containerName="util" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.510334 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be2276e-319a-41b2-afe6-40b807bb398a" containerName="util" Feb 27 08:43:46 crc kubenswrapper[4906]: E0227 08:43:46.510416 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be2276e-319a-41b2-afe6-40b807bb398a" containerName="extract" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.510490 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be2276e-319a-41b2-afe6-40b807bb398a" containerName="extract" Feb 27 08:43:46 crc kubenswrapper[4906]: E0227 08:43:46.510598 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4be2276e-319a-41b2-afe6-40b807bb398a" containerName="pull" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.510673 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4be2276e-319a-41b2-afe6-40b807bb398a" containerName="pull" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.510869 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="4be2276e-319a-41b2-afe6-40b807bb398a" containerName="extract" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.511467 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.513696 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-zw64z" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.513731 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.513746 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.521561 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr"] Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.653609 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wbgm\" (UniqueName: \"kubernetes.io/projected/9cfa03e5-ea58-4c27-b123-24d93614151e-kube-api-access-5wbgm\") pod \"nmstate-operator-75c5dccd6c-xbzkr\" (UID: \"9cfa03e5-ea58-4c27-b123-24d93614151e\") " pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.754958 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wbgm\" (UniqueName: \"kubernetes.io/projected/9cfa03e5-ea58-4c27-b123-24d93614151e-kube-api-access-5wbgm\") pod \"nmstate-operator-75c5dccd6c-xbzkr\" (UID: \"9cfa03e5-ea58-4c27-b123-24d93614151e\") " pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.774094 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wbgm\" (UniqueName: \"kubernetes.io/projected/9cfa03e5-ea58-4c27-b123-24d93614151e-kube-api-access-5wbgm\") pod \"nmstate-operator-75c5dccd6c-xbzkr\" (UID: \"9cfa03e5-ea58-4c27-b123-24d93614151e\") " pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" Feb 27 08:43:46 crc kubenswrapper[4906]: I0227 08:43:46.826896 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" Feb 27 08:43:47 crc kubenswrapper[4906]: I0227 08:43:47.092518 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr"] Feb 27 08:43:47 crc kubenswrapper[4906]: I0227 08:43:47.120018 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" event={"ID":"9cfa03e5-ea58-4c27-b123-24d93614151e","Type":"ContainerStarted","Data":"76533b766c979f0cdb620e8cce621a157cda69fb2d8a6d7b07e2b4c84c7df537"} Feb 27 08:43:48 crc kubenswrapper[4906]: I0227 08:43:48.069972 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:48 crc kubenswrapper[4906]: I0227 08:43:48.070440 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:49 crc kubenswrapper[4906]: I0227 08:43:49.115288 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-zf64x" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="registry-server" probeResult="failure" output=< Feb 27 08:43:49 crc kubenswrapper[4906]: timeout: failed to connect service ":50051" within 1s Feb 27 08:43:49 crc kubenswrapper[4906]: > Feb 27 08:43:51 crc kubenswrapper[4906]: I0227 08:43:51.147202 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" event={"ID":"9cfa03e5-ea58-4c27-b123-24d93614151e","Type":"ContainerStarted","Data":"2f48a8062ffc557bc90567dea6887e33b913019cfde05cc03bc144993382460c"} Feb 27 08:43:51 crc kubenswrapper[4906]: I0227 08:43:51.169435 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-75c5dccd6c-xbzkr" podStartSLOduration=2.051778581 podStartE2EDuration="5.169323282s" podCreationTimestamp="2026-02-27 08:43:46 +0000 UTC" firstStartedPulling="2026-02-27 08:43:47.107271365 +0000 UTC m=+925.501672975" lastFinishedPulling="2026-02-27 08:43:50.224816066 +0000 UTC m=+928.619217676" observedRunningTime="2026-02-27 08:43:51.163620222 +0000 UTC m=+929.558021852" watchObservedRunningTime="2026-02-27 08:43:51.169323282 +0000 UTC m=+929.563724892" Feb 27 08:43:54 crc kubenswrapper[4906]: I0227 08:43:54.845242 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:43:54 crc kubenswrapper[4906]: I0227 08:43:54.846025 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:43:54 crc kubenswrapper[4906]: I0227 08:43:54.846135 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:43:54 crc kubenswrapper[4906]: I0227 08:43:54.847318 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"dd35a2232190406068e2b0898196cd8569373748f1f44babb52802f39e40a3ab"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:43:54 crc kubenswrapper[4906]: I0227 08:43:54.847457 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://dd35a2232190406068e2b0898196cd8569373748f1f44babb52802f39e40a3ab" gracePeriod=600 Feb 27 08:43:55 crc kubenswrapper[4906]: I0227 08:43:55.174733 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="dd35a2232190406068e2b0898196cd8569373748f1f44babb52802f39e40a3ab" exitCode=0 Feb 27 08:43:55 crc kubenswrapper[4906]: I0227 08:43:55.174792 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"dd35a2232190406068e2b0898196cd8569373748f1f44babb52802f39e40a3ab"} Feb 27 08:43:55 crc kubenswrapper[4906]: I0227 08:43:55.175195 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"bb55c20fecc20d21bdfb369cf4ea10a10466e88d9a7657f3958a0393ee619f89"} Feb 27 08:43:55 crc kubenswrapper[4906]: I0227 08:43:55.175226 4906 scope.go:117] "RemoveContainer" containerID="5279e6ebbd53a2bdf9a1dc599b1baf42994b0cc910b5f1d6e07817732e7c99c7" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.277228 4906 scope.go:117] "RemoveContainer" containerID="41eedc8fb57fff9ee8587cf308c6bc779385fe45c289d2450f5039e83c71b349" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.419671 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-69594cc75-9xjr4"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.420993 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.423354 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-ggqhk" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.433767 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.434802 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.438330 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.458238 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.463241 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-69594cc75-9xjr4"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.485136 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-h644p"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.486033 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.539360 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x428m\" (UniqueName: \"kubernetes.io/projected/30e5e423-b09e-4ade-baa1-257731b5cc0b-kube-api-access-x428m\") pod \"nmstate-webhook-786f45cff4-qvmtw\" (UID: \"30e5e423-b09e-4ade-baa1-257731b5cc0b\") " pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.539427 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwdhr\" (UniqueName: \"kubernetes.io/projected/5f0d3058-8b6c-45aa-84de-416a2f458647-kube-api-access-pwdhr\") pod \"nmstate-metrics-69594cc75-9xjr4\" (UID: \"5f0d3058-8b6c-45aa-84de-416a2f458647\") " pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.539460 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/30e5e423-b09e-4ade-baa1-257731b5cc0b-tls-key-pair\") pod \"nmstate-webhook-786f45cff4-qvmtw\" (UID: \"30e5e423-b09e-4ade-baa1-257731b5cc0b\") " pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.584294 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.585765 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.588957 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.593799 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.593819 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-wsbqx" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.593810 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.642902 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4m98k\" (UniqueName: \"kubernetes.io/projected/8c34014e-ec30-40e8-ad99-e88f13beccfc-kube-api-access-4m98k\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.642987 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtf4q\" (UniqueName: \"kubernetes.io/projected/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-kube-api-access-rtf4q\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643016 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8c34014e-ec30-40e8-ad99-e88f13beccfc-plugin-serving-cert\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643060 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-nmstate-lock\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643167 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-dbus-socket\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643211 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8c34014e-ec30-40e8-ad99-e88f13beccfc-nginx-conf\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643306 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x428m\" (UniqueName: 
\"kubernetes.io/projected/30e5e423-b09e-4ade-baa1-257731b5cc0b-kube-api-access-x428m\") pod \"nmstate-webhook-786f45cff4-qvmtw\" (UID: \"30e5e423-b09e-4ade-baa1-257731b5cc0b\") " pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643399 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-ovs-socket\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643526 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwdhr\" (UniqueName: \"kubernetes.io/projected/5f0d3058-8b6c-45aa-84de-416a2f458647-kube-api-access-pwdhr\") pod \"nmstate-metrics-69594cc75-9xjr4\" (UID: \"5f0d3058-8b6c-45aa-84de-416a2f458647\") " pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.643590 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/30e5e423-b09e-4ade-baa1-257731b5cc0b-tls-key-pair\") pod \"nmstate-webhook-786f45cff4-qvmtw\" (UID: \"30e5e423-b09e-4ade-baa1-257731b5cc0b\") " pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.651346 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/30e5e423-b09e-4ade-baa1-257731b5cc0b-tls-key-pair\") pod \"nmstate-webhook-786f45cff4-qvmtw\" (UID: \"30e5e423-b09e-4ade-baa1-257731b5cc0b\") " pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.662842 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwdhr\" (UniqueName: \"kubernetes.io/projected/5f0d3058-8b6c-45aa-84de-416a2f458647-kube-api-access-pwdhr\") pod \"nmstate-metrics-69594cc75-9xjr4\" (UID: \"5f0d3058-8b6c-45aa-84de-416a2f458647\") " pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.666357 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x428m\" (UniqueName: \"kubernetes.io/projected/30e5e423-b09e-4ade-baa1-257731b5cc0b-kube-api-access-x428m\") pod \"nmstate-webhook-786f45cff4-qvmtw\" (UID: \"30e5e423-b09e-4ade-baa1-257731b5cc0b\") " pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.738714 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.747728 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4m98k\" (UniqueName: \"kubernetes.io/projected/8c34014e-ec30-40e8-ad99-e88f13beccfc-kube-api-access-4m98k\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.747798 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtf4q\" (UniqueName: \"kubernetes.io/projected/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-kube-api-access-rtf4q\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.747828 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8c34014e-ec30-40e8-ad99-e88f13beccfc-plugin-serving-cert\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.747860 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-nmstate-lock\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.747915 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-dbus-socket\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.747944 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8c34014e-ec30-40e8-ad99-e88f13beccfc-nginx-conf\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.747967 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-ovs-socket\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.748085 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-ovs-socket\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.748429 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-nmstate-lock\") pod \"nmstate-handler-h644p\" (UID: 
\"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.748752 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-dbus-socket\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.751695 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/8c34014e-ec30-40e8-ad99-e88f13beccfc-nginx-conf\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.753294 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/8c34014e-ec30-40e8-ad99-e88f13beccfc-plugin-serving-cert\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.753624 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.768547 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4m98k\" (UniqueName: \"kubernetes.io/projected/8c34014e-ec30-40e8-ad99-e88f13beccfc-kube-api-access-4m98k\") pod \"nmstate-console-plugin-5dcbbd79cf-jvbg5\" (UID: \"8c34014e-ec30-40e8-ad99-e88f13beccfc\") " pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.780576 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtf4q\" (UniqueName: \"kubernetes.io/projected/9029ee95-172c-4b7f-b240-e6f54a9f8c0a-kube-api-access-rtf4q\") pod \"nmstate-handler-h644p\" (UID: \"9029ee95-172c-4b7f-b240-e6f54a9f8c0a\") " pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.807365 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.817554 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-78945fc675-bp8xq"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.818569 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.841204 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-78945fc675-bp8xq"] Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.912359 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.958298 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqfcm\" (UniqueName: \"kubernetes.io/projected/7a9ef835-f35f-4281-9c70-38f0ee7b125b-kube-api-access-kqfcm\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.958408 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-config\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.958450 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-trusted-ca-bundle\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.958475 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-serving-cert\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.958504 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-service-ca\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.958525 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-oauth-serving-cert\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:56 crc kubenswrapper[4906]: I0227 08:43:56.958553 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-oauth-config\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.059463 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-config\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.059534 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-trusted-ca-bundle\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.060736 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-config\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.061766 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-trusted-ca-bundle\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.061896 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-serving-cert\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.061950 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-service-ca\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.061974 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-oauth-serving-cert\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.062045 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-oauth-config\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.063024 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-service-ca\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.063375 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/7a9ef835-f35f-4281-9c70-38f0ee7b125b-oauth-serving-cert\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.063649 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw"] Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 
08:43:57.062365 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqfcm\" (UniqueName: \"kubernetes.io/projected/7a9ef835-f35f-4281-9c70-38f0ee7b125b-kube-api-access-kqfcm\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.067215 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-oauth-config\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.067743 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/7a9ef835-f35f-4281-9c70-38f0ee7b125b-console-serving-cert\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.089245 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqfcm\" (UniqueName: \"kubernetes.io/projected/7a9ef835-f35f-4281-9c70-38f0ee7b125b-kube-api-access-kqfcm\") pod \"console-78945fc675-bp8xq\" (UID: \"7a9ef835-f35f-4281-9c70-38f0ee7b125b\") " pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.137867 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.187187 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5"] Feb 27 08:43:57 crc kubenswrapper[4906]: W0227 08:43:57.200515 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8c34014e_ec30_40e8_ad99_e88f13beccfc.slice/crio-49df8d142854d81c3d7c6a64f5e19ce9ce73cabc22ae61dbcc7f9b9a7f8e0676 WatchSource:0}: Error finding container 49df8d142854d81c3d7c6a64f5e19ce9ce73cabc22ae61dbcc7f9b9a7f8e0676: Status 404 returned error can't find the container with id 49df8d142854d81c3d7c6a64f5e19ce9ce73cabc22ae61dbcc7f9b9a7f8e0676 Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.218729 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-h644p" event={"ID":"9029ee95-172c-4b7f-b240-e6f54a9f8c0a","Type":"ContainerStarted","Data":"a69f5f52032ea770731487d1fbbe6df7b7d1e337a831e13a94c232bb34e61e77"} Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.220631 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" event={"ID":"30e5e423-b09e-4ade-baa1-257731b5cc0b","Type":"ContainerStarted","Data":"720677000e3b4f4f79ffd383d2b82f057b30a364b0d4008a53897b00bcdc91f5"} Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.223037 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-6nxxh_a961de01-e505-4c80-96a0-333da958a633/kube-multus/2.log" Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.247660 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-69594cc75-9xjr4"] Feb 27 08:43:57 crc kubenswrapper[4906]: W0227 08:43:57.251693 4906 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5f0d3058_8b6c_45aa_84de_416a2f458647.slice/crio-0ce1d9f26ef58590211d94fdac24a451c0836ace94ba5b08dedaecbfd32d968a WatchSource:0}: Error finding container 0ce1d9f26ef58590211d94fdac24a451c0836ace94ba5b08dedaecbfd32d968a: Status 404 returned error can't find the container with id 0ce1d9f26ef58590211d94fdac24a451c0836ace94ba5b08dedaecbfd32d968a Feb 27 08:43:57 crc kubenswrapper[4906]: I0227 08:43:57.367171 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-78945fc675-bp8xq"] Feb 27 08:43:57 crc kubenswrapper[4906]: W0227 08:43:57.371523 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a9ef835_f35f_4281_9c70_38f0ee7b125b.slice/crio-cb204472941cd9370483464b1be1c08bfa63fe925752f192269cd916c821f1e4 WatchSource:0}: Error finding container cb204472941cd9370483464b1be1c08bfa63fe925752f192269cd916c821f1e4: Status 404 returned error can't find the container with id cb204472941cd9370483464b1be1c08bfa63fe925752f192269cd916c821f1e4 Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.122286 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.181787 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.232415 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" event={"ID":"8c34014e-ec30-40e8-ad99-e88f13beccfc","Type":"ContainerStarted","Data":"49df8d142854d81c3d7c6a64f5e19ce9ce73cabc22ae61dbcc7f9b9a7f8e0676"} Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.233442 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" event={"ID":"5f0d3058-8b6c-45aa-84de-416a2f458647","Type":"ContainerStarted","Data":"0ce1d9f26ef58590211d94fdac24a451c0836ace94ba5b08dedaecbfd32d968a"} Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.235133 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78945fc675-bp8xq" event={"ID":"7a9ef835-f35f-4281-9c70-38f0ee7b125b","Type":"ContainerStarted","Data":"4e2e5821e2a43eba9f48fa4f9018d14e857b47e892bc6f9331f320f6724fbb73"} Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.235171 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-78945fc675-bp8xq" event={"ID":"7a9ef835-f35f-4281-9c70-38f0ee7b125b","Type":"ContainerStarted","Data":"cb204472941cd9370483464b1be1c08bfa63fe925752f192269cd916c821f1e4"} Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.257802 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-78945fc675-bp8xq" podStartSLOduration=2.257779478 podStartE2EDuration="2.257779478s" podCreationTimestamp="2026-02-27 08:43:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:43:58.253828674 +0000 UTC m=+936.648230294" watchObservedRunningTime="2026-02-27 08:43:58.257779478 +0000 UTC m=+936.652181088" Feb 27 08:43:58 crc kubenswrapper[4906]: I0227 08:43:58.373669 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-operators-zf64x"] Feb 27 08:43:59 crc kubenswrapper[4906]: I0227 08:43:59.244285 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zf64x" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="registry-server" containerID="cri-o://c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616" gracePeriod=2 Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.102949 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.105730 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-catalog-content\") pod \"27bb9122-6cee-4f82-af66-125124a0a2ad\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.105809 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-utilities\") pod \"27bb9122-6cee-4f82-af66-125124a0a2ad\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.105903 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4k89k\" (UniqueName: \"kubernetes.io/projected/27bb9122-6cee-4f82-af66-125124a0a2ad-kube-api-access-4k89k\") pod \"27bb9122-6cee-4f82-af66-125124a0a2ad\" (UID: \"27bb9122-6cee-4f82-af66-125124a0a2ad\") " Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.112051 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-utilities" (OuterVolumeSpecName: "utilities") pod "27bb9122-6cee-4f82-af66-125124a0a2ad" (UID: "27bb9122-6cee-4f82-af66-125124a0a2ad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.113677 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27bb9122-6cee-4f82-af66-125124a0a2ad-kube-api-access-4k89k" (OuterVolumeSpecName: "kube-api-access-4k89k") pod "27bb9122-6cee-4f82-af66-125124a0a2ad" (UID: "27bb9122-6cee-4f82-af66-125124a0a2ad"). InnerVolumeSpecName "kube-api-access-4k89k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.155373 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536364-76q6w"] Feb 27 08:44:00 crc kubenswrapper[4906]: E0227 08:44:00.155676 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="extract-content" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.155697 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="extract-content" Feb 27 08:44:00 crc kubenswrapper[4906]: E0227 08:44:00.155716 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="extract-utilities" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.155724 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="extract-utilities" Feb 27 08:44:00 crc kubenswrapper[4906]: E0227 08:44:00.155741 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="registry-server" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.155747 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="registry-server" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.155845 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerName="registry-server" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.156320 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536364-76q6w" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.158602 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.158697 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.158822 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.161395 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536364-76q6w"] Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.207417 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4k89k\" (UniqueName: \"kubernetes.io/projected/27bb9122-6cee-4f82-af66-125124a0a2ad-kube-api-access-4k89k\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.207463 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.252870 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "27bb9122-6cee-4f82-af66-125124a0a2ad" (UID: "27bb9122-6cee-4f82-af66-125124a0a2ad"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.257125 4906 generic.go:334] "Generic (PLEG): container finished" podID="27bb9122-6cee-4f82-af66-125124a0a2ad" containerID="c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616" exitCode=0 Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.257180 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zf64x" event={"ID":"27bb9122-6cee-4f82-af66-125124a0a2ad","Type":"ContainerDied","Data":"c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616"} Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.257217 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zf64x" event={"ID":"27bb9122-6cee-4f82-af66-125124a0a2ad","Type":"ContainerDied","Data":"2a3f535f9f2f5a021c528803d05cf2d68982a2f2587340073f4cfb5ca9b9f638"} Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.257239 4906 scope.go:117] "RemoveContainer" containerID="c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.257307 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zf64x" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.303277 4906 scope.go:117] "RemoveContainer" containerID="75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.313893 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2qj7\" (UniqueName: \"kubernetes.io/projected/fd15954e-2839-4a77-890e-16267bbb27b6-kube-api-access-b2qj7\") pod \"auto-csr-approver-29536364-76q6w\" (UID: \"fd15954e-2839-4a77-890e-16267bbb27b6\") " pod="openshift-infra/auto-csr-approver-29536364-76q6w" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.314003 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/27bb9122-6cee-4f82-af66-125124a0a2ad-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.332827 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zf64x"] Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.343266 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zf64x"] Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.351034 4906 scope.go:117] "RemoveContainer" containerID="548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.375333 4906 scope.go:117] "RemoveContainer" containerID="c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616" Feb 27 08:44:00 crc kubenswrapper[4906]: E0227 08:44:00.376076 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616\": container with ID starting with c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616 not found: ID does not exist" containerID="c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.376152 4906 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616"} err="failed to get container status \"c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616\": rpc error: code = NotFound desc = could not find container \"c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616\": container with ID starting with c58f57e79b347865b42876857319e8058b34ef6994cb515bf141b364b2a97616 not found: ID does not exist" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.376195 4906 scope.go:117] "RemoveContainer" containerID="75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe" Feb 27 08:44:00 crc kubenswrapper[4906]: E0227 08:44:00.376939 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe\": container with ID starting with 75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe not found: ID does not exist" containerID="75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.376995 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe"} err="failed to get container status \"75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe\": rpc error: code = NotFound desc = could not find container \"75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe\": container with ID starting with 75d18bb7151e700f84257a822388bd87806029bb4b55dc303fc9aabb9e4200fe not found: ID does not exist" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.377085 4906 scope.go:117] "RemoveContainer" containerID="548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d" Feb 27 08:44:00 crc kubenswrapper[4906]: E0227 08:44:00.377686 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d\": container with ID starting with 548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d not found: ID does not exist" containerID="548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.377712 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d"} err="failed to get container status \"548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d\": rpc error: code = NotFound desc = could not find container \"548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d\": container with ID starting with 548b2faa201984afaf449cd8378c54e9834c1f2e62af33a0c8a75dfe62c9887d not found: ID does not exist" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.415050 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2qj7\" (UniqueName: \"kubernetes.io/projected/fd15954e-2839-4a77-890e-16267bbb27b6-kube-api-access-b2qj7\") pod \"auto-csr-approver-29536364-76q6w\" (UID: \"fd15954e-2839-4a77-890e-16267bbb27b6\") " pod="openshift-infra/auto-csr-approver-29536364-76q6w" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.437400 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2qj7\" (UniqueName: 
\"kubernetes.io/projected/fd15954e-2839-4a77-890e-16267bbb27b6-kube-api-access-b2qj7\") pod \"auto-csr-approver-29536364-76q6w\" (UID: \"fd15954e-2839-4a77-890e-16267bbb27b6\") " pod="openshift-infra/auto-csr-approver-29536364-76q6w" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.475770 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536364-76q6w" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.562202 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27bb9122-6cee-4f82-af66-125124a0a2ad" path="/var/lib/kubelet/pods/27bb9122-6cee-4f82-af66-125124a0a2ad/volumes" Feb 27 08:44:00 crc kubenswrapper[4906]: I0227 08:44:00.698429 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536364-76q6w"] Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.268583 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" event={"ID":"30e5e423-b09e-4ade-baa1-257731b5cc0b","Type":"ContainerStarted","Data":"75ec6c0c34f6350cfa9d7497e410ac856d0b8cd9dbde6b269f09a04bb28a8fac"} Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.269253 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.271198 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" event={"ID":"5f0d3058-8b6c-45aa-84de-416a2f458647","Type":"ContainerStarted","Data":"8b8e2b004dc3a8ff165d88bd3c1ba48663fb3905e6d3520c2d34ec1fd575be51"} Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.279356 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536364-76q6w" event={"ID":"fd15954e-2839-4a77-890e-16267bbb27b6","Type":"ContainerStarted","Data":"9be62f6c59dc56e6176c89481e431a6185919d57092c33152f7e469eef7c5843"} Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.281389 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-h644p" event={"ID":"9029ee95-172c-4b7f-b240-e6f54a9f8c0a","Type":"ContainerStarted","Data":"c520fb05fff1c400195eb9d7abc4794009de1be82c60d31788a3e24892a3d60e"} Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.282927 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" event={"ID":"8c34014e-ec30-40e8-ad99-e88f13beccfc","Type":"ContainerStarted","Data":"8ef3d8d49ff6e7b66ed0eca02cca50243b895368728ae51bd8a2c6957b5ce751"} Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.291867 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" podStartSLOduration=2.148768475 podStartE2EDuration="5.291846749s" podCreationTimestamp="2026-02-27 08:43:56 +0000 UTC" firstStartedPulling="2026-02-27 08:43:57.076581094 +0000 UTC m=+935.470982704" lastFinishedPulling="2026-02-27 08:44:00.219659368 +0000 UTC m=+938.614060978" observedRunningTime="2026-02-27 08:44:01.287616587 +0000 UTC m=+939.682018217" watchObservedRunningTime="2026-02-27 08:44:01.291846749 +0000 UTC m=+939.686248349" Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.320736 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-h644p" podStartSLOduration=2.010631683 podStartE2EDuration="5.320704369s" 
podCreationTimestamp="2026-02-27 08:43:56 +0000 UTC" firstStartedPulling="2026-02-27 08:43:56.882891789 +0000 UTC m=+935.277293399" lastFinishedPulling="2026-02-27 08:44:00.192964475 +0000 UTC m=+938.587366085" observedRunningTime="2026-02-27 08:44:01.314095475 +0000 UTC m=+939.708497095" watchObservedRunningTime="2026-02-27 08:44:01.320704369 +0000 UTC m=+939.715105979" Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.332051 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-5dcbbd79cf-jvbg5" podStartSLOduration=2.351223891 podStartE2EDuration="5.332035268s" podCreationTimestamp="2026-02-27 08:43:56 +0000 UTC" firstStartedPulling="2026-02-27 08:43:57.212479396 +0000 UTC m=+935.606881006" lastFinishedPulling="2026-02-27 08:44:00.193290773 +0000 UTC m=+938.587692383" observedRunningTime="2026-02-27 08:44:01.330342123 +0000 UTC m=+939.724743743" watchObservedRunningTime="2026-02-27 08:44:01.332035268 +0000 UTC m=+939.726436878" Feb 27 08:44:01 crc kubenswrapper[4906]: I0227 08:44:01.811724 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:44:02 crc kubenswrapper[4906]: I0227 08:44:02.293929 4906 generic.go:334] "Generic (PLEG): container finished" podID="fd15954e-2839-4a77-890e-16267bbb27b6" containerID="39126b2860e040002b9da8ecaa22519f453ac1405289cde078ae26f470ef55fb" exitCode=0 Feb 27 08:44:02 crc kubenswrapper[4906]: I0227 08:44:02.293987 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536364-76q6w" event={"ID":"fd15954e-2839-4a77-890e-16267bbb27b6","Type":"ContainerDied","Data":"39126b2860e040002b9da8ecaa22519f453ac1405289cde078ae26f470ef55fb"} Feb 27 08:44:03 crc kubenswrapper[4906]: I0227 08:44:03.305760 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" event={"ID":"5f0d3058-8b6c-45aa-84de-416a2f458647","Type":"ContainerStarted","Data":"639e8aeaaadfc3d58be99735bbdb49c0d8799695f80e577d473ff85675c89729"} Feb 27 08:44:03 crc kubenswrapper[4906]: I0227 08:44:03.335424 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-69594cc75-9xjr4" podStartSLOduration=1.951031553 podStartE2EDuration="7.335372122s" podCreationTimestamp="2026-02-27 08:43:56 +0000 UTC" firstStartedPulling="2026-02-27 08:43:57.254854193 +0000 UTC m=+935.649256003" lastFinishedPulling="2026-02-27 08:44:02.639194962 +0000 UTC m=+941.033596572" observedRunningTime="2026-02-27 08:44:03.331346366 +0000 UTC m=+941.725748016" watchObservedRunningTime="2026-02-27 08:44:03.335372122 +0000 UTC m=+941.729773742" Feb 27 08:44:03 crc kubenswrapper[4906]: I0227 08:44:03.640242 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536364-76q6w" Feb 27 08:44:03 crc kubenswrapper[4906]: I0227 08:44:03.667803 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b2qj7\" (UniqueName: \"kubernetes.io/projected/fd15954e-2839-4a77-890e-16267bbb27b6-kube-api-access-b2qj7\") pod \"fd15954e-2839-4a77-890e-16267bbb27b6\" (UID: \"fd15954e-2839-4a77-890e-16267bbb27b6\") " Feb 27 08:44:03 crc kubenswrapper[4906]: I0227 08:44:03.675865 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fd15954e-2839-4a77-890e-16267bbb27b6-kube-api-access-b2qj7" (OuterVolumeSpecName: "kube-api-access-b2qj7") pod "fd15954e-2839-4a77-890e-16267bbb27b6" (UID: "fd15954e-2839-4a77-890e-16267bbb27b6"). InnerVolumeSpecName "kube-api-access-b2qj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:44:03 crc kubenswrapper[4906]: I0227 08:44:03.772371 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b2qj7\" (UniqueName: \"kubernetes.io/projected/fd15954e-2839-4a77-890e-16267bbb27b6-kube-api-access-b2qj7\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:04 crc kubenswrapper[4906]: I0227 08:44:04.314445 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536364-76q6w" event={"ID":"fd15954e-2839-4a77-890e-16267bbb27b6","Type":"ContainerDied","Data":"9be62f6c59dc56e6176c89481e431a6185919d57092c33152f7e469eef7c5843"} Feb 27 08:44:04 crc kubenswrapper[4906]: I0227 08:44:04.314516 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9be62f6c59dc56e6176c89481e431a6185919d57092c33152f7e469eef7c5843" Feb 27 08:44:04 crc kubenswrapper[4906]: I0227 08:44:04.314468 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536364-76q6w" Feb 27 08:44:04 crc kubenswrapper[4906]: I0227 08:44:04.698308 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536358-tb244"] Feb 27 08:44:04 crc kubenswrapper[4906]: I0227 08:44:04.704293 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536358-tb244"] Feb 27 08:44:06 crc kubenswrapper[4906]: I0227 08:44:06.564208 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e8b22f9-fdf8-40a7-881e-fdcb521e7ece" path="/var/lib/kubelet/pods/8e8b22f9-fdf8-40a7-881e-fdcb521e7ece/volumes" Feb 27 08:44:06 crc kubenswrapper[4906]: I0227 08:44:06.838703 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-h644p" Feb 27 08:44:07 crc kubenswrapper[4906]: I0227 08:44:07.139423 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:44:07 crc kubenswrapper[4906]: I0227 08:44:07.139476 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:44:07 crc kubenswrapper[4906]: I0227 08:44:07.145372 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:44:07 crc kubenswrapper[4906]: I0227 08:44:07.344488 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-78945fc675-bp8xq" Feb 27 08:44:07 crc kubenswrapper[4906]: I0227 08:44:07.402953 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xx8qm"] Feb 27 08:44:16 crc kubenswrapper[4906]: I0227 08:44:16.763717 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.712744 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6"] Feb 27 08:44:30 crc kubenswrapper[4906]: E0227 08:44:30.713956 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fd15954e-2839-4a77-890e-16267bbb27b6" containerName="oc" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.713977 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fd15954e-2839-4a77-890e-16267bbb27b6" containerName="oc" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.714163 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fd15954e-2839-4a77-890e-16267bbb27b6" containerName="oc" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.715199 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.718389 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.721872 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6"] Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.815040 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-bundle\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.815100 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9p68\" (UniqueName: \"kubernetes.io/projected/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-kube-api-access-c9p68\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.815135 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-util\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.916462 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-bundle\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.916557 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9p68\" (UniqueName: \"kubernetes.io/projected/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-kube-api-access-c9p68\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.916622 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-util\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.917298 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-bundle\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.917513 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-util\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:30 crc kubenswrapper[4906]: I0227 08:44:30.944060 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9p68\" (UniqueName: \"kubernetes.io/projected/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-kube-api-access-c9p68\") pod \"d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:31 crc kubenswrapper[4906]: I0227 08:44:31.053043 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:31 crc kubenswrapper[4906]: I0227 08:44:31.488554 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6"] Feb 27 08:44:31 crc kubenswrapper[4906]: I0227 08:44:31.533763 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" event={"ID":"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272","Type":"ContainerStarted","Data":"c44659dbeed39248d1b45f21d3895227925734cd6a6ae1d9d1901011085fe4c1"} Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.464459 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-xx8qm" podUID="a861f5dc-100c-443f-ab72-ecfe71895998" containerName="console" containerID="cri-o://3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1" gracePeriod=15 Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.543906 4906 generic.go:334] "Generic (PLEG): container finished" podID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerID="13acf216d4e790f7a6c121d35760270001f0f88de566ffeb93bb055afb773794" exitCode=0 Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.543987 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" event={"ID":"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272","Type":"ContainerDied","Data":"13acf216d4e790f7a6c121d35760270001f0f88de566ffeb93bb055afb773794"} Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.893057 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xx8qm_a861f5dc-100c-443f-ab72-ecfe71895998/console/0.log" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.893555 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.945465 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-serving-cert\") pod \"a861f5dc-100c-443f-ab72-ecfe71895998\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.945597 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-console-config\") pod \"a861f5dc-100c-443f-ab72-ecfe71895998\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.945636 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-oauth-config\") pod \"a861f5dc-100c-443f-ab72-ecfe71895998\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.945702 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-service-ca\") pod \"a861f5dc-100c-443f-ab72-ecfe71895998\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.945773 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gnm6\" (UniqueName: \"kubernetes.io/projected/a861f5dc-100c-443f-ab72-ecfe71895998-kube-api-access-5gnm6\") pod \"a861f5dc-100c-443f-ab72-ecfe71895998\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.945823 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-oauth-serving-cert\") pod \"a861f5dc-100c-443f-ab72-ecfe71895998\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.945960 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-trusted-ca-bundle\") pod \"a861f5dc-100c-443f-ab72-ecfe71895998\" (UID: \"a861f5dc-100c-443f-ab72-ecfe71895998\") " Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.947765 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "a861f5dc-100c-443f-ab72-ecfe71895998" (UID: "a861f5dc-100c-443f-ab72-ecfe71895998"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.947753 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-console-config" (OuterVolumeSpecName: "console-config") pod "a861f5dc-100c-443f-ab72-ecfe71895998" (UID: "a861f5dc-100c-443f-ab72-ecfe71895998"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.948457 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-service-ca" (OuterVolumeSpecName: "service-ca") pod "a861f5dc-100c-443f-ab72-ecfe71895998" (UID: "a861f5dc-100c-443f-ab72-ecfe71895998"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.948489 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "a861f5dc-100c-443f-ab72-ecfe71895998" (UID: "a861f5dc-100c-443f-ab72-ecfe71895998"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.955444 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a861f5dc-100c-443f-ab72-ecfe71895998-kube-api-access-5gnm6" (OuterVolumeSpecName: "kube-api-access-5gnm6") pod "a861f5dc-100c-443f-ab72-ecfe71895998" (UID: "a861f5dc-100c-443f-ab72-ecfe71895998"). InnerVolumeSpecName "kube-api-access-5gnm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.955639 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "a861f5dc-100c-443f-ab72-ecfe71895998" (UID: "a861f5dc-100c-443f-ab72-ecfe71895998"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:44:32 crc kubenswrapper[4906]: I0227 08:44:32.955528 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "a861f5dc-100c-443f-ab72-ecfe71895998" (UID: "a861f5dc-100c-443f-ab72-ecfe71895998"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.048254 4906 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-service-ca\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.048296 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gnm6\" (UniqueName: \"kubernetes.io/projected/a861f5dc-100c-443f-ab72-ecfe71895998-kube-api-access-5gnm6\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.048311 4906 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.048325 4906 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.048337 4906 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.048348 4906 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a861f5dc-100c-443f-ab72-ecfe71895998-console-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.048359 4906 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a861f5dc-100c-443f-ab72-ecfe71895998-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.988564 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xx8qm_a861f5dc-100c-443f-ab72-ecfe71895998/console/0.log" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.988647 4906 generic.go:334] "Generic (PLEG): container finished" podID="a861f5dc-100c-443f-ab72-ecfe71895998" containerID="3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1" exitCode=2 Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.988693 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xx8qm" event={"ID":"a861f5dc-100c-443f-ab72-ecfe71895998","Type":"ContainerDied","Data":"3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1"} Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.988735 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xx8qm" event={"ID":"a861f5dc-100c-443f-ab72-ecfe71895998","Type":"ContainerDied","Data":"a17b7c6f779e5df4e396835875b24755438bba43b8c32d643f837c06879256ac"} Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.988737 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-xx8qm" Feb 27 08:44:33 crc kubenswrapper[4906]: I0227 08:44:33.988759 4906 scope.go:117] "RemoveContainer" containerID="3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1" Feb 27 08:44:34 crc kubenswrapper[4906]: I0227 08:44:34.030425 4906 scope.go:117] "RemoveContainer" containerID="3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1" Feb 27 08:44:34 crc kubenswrapper[4906]: E0227 08:44:34.031658 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1\": container with ID starting with 3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1 not found: ID does not exist" containerID="3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1" Feb 27 08:44:34 crc kubenswrapper[4906]: I0227 08:44:34.031699 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1"} err="failed to get container status \"3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1\": rpc error: code = NotFound desc = could not find container \"3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1\": container with ID starting with 3f88d9e398c9fe052cd497717ad278ef284fd1dcef32b5593f93ab22d94702b1 not found: ID does not exist" Feb 27 08:44:34 crc kubenswrapper[4906]: I0227 08:44:34.064437 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xx8qm"] Feb 27 08:44:34 crc kubenswrapper[4906]: I0227 08:44:34.071201 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-xx8qm"] Feb 27 08:44:34 crc kubenswrapper[4906]: I0227 08:44:34.560144 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a861f5dc-100c-443f-ab72-ecfe71895998" path="/var/lib/kubelet/pods/a861f5dc-100c-443f-ab72-ecfe71895998/volumes" Feb 27 08:44:36 crc kubenswrapper[4906]: I0227 08:44:36.015497 4906 generic.go:334] "Generic (PLEG): container finished" podID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerID="c8f6beb00f2e899260bd0ec2e70c137ae9f7bc42969d3c39a71e2d5ea5041a0f" exitCode=0 Feb 27 08:44:36 crc kubenswrapper[4906]: I0227 08:44:36.015578 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" event={"ID":"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272","Type":"ContainerDied","Data":"c8f6beb00f2e899260bd0ec2e70c137ae9f7bc42969d3c39a71e2d5ea5041a0f"} Feb 27 08:44:37 crc kubenswrapper[4906]: I0227 08:44:37.026545 4906 generic.go:334] "Generic (PLEG): container finished" podID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerID="0c522e5bc7901f7bc949a3bdf0abf14503884b85ff357798b92ebf806d485766" exitCode=0 Feb 27 08:44:37 crc kubenswrapper[4906]: I0227 08:44:37.026616 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" event={"ID":"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272","Type":"ContainerDied","Data":"0c522e5bc7901f7bc949a3bdf0abf14503884b85ff357798b92ebf806d485766"} Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.331707 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.360193 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c9p68\" (UniqueName: \"kubernetes.io/projected/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-kube-api-access-c9p68\") pod \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.360349 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-util\") pod \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.360429 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-bundle\") pod \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\" (UID: \"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272\") " Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.361700 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-bundle" (OuterVolumeSpecName: "bundle") pod "1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" (UID: "1c0d5365-d44e-4f3c-ac5e-93a61d9f4272"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.368628 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-kube-api-access-c9p68" (OuterVolumeSpecName: "kube-api-access-c9p68") pod "1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" (UID: "1c0d5365-d44e-4f3c-ac5e-93a61d9f4272"). InnerVolumeSpecName "kube-api-access-c9p68". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.372582 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-util" (OuterVolumeSpecName: "util") pod "1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" (UID: "1c0d5365-d44e-4f3c-ac5e-93a61d9f4272"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.462126 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c9p68\" (UniqueName: \"kubernetes.io/projected/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-kube-api-access-c9p68\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.462180 4906 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-util\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:38 crc kubenswrapper[4906]: I0227 08:44:38.462195 4906 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1c0d5365-d44e-4f3c-ac5e-93a61d9f4272-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:44:39 crc kubenswrapper[4906]: I0227 08:44:39.043695 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" event={"ID":"1c0d5365-d44e-4f3c-ac5e-93a61d9f4272","Type":"ContainerDied","Data":"c44659dbeed39248d1b45f21d3895227925734cd6a6ae1d9d1901011085fe4c1"} Feb 27 08:44:39 crc kubenswrapper[4906]: I0227 08:44:39.043743 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c44659dbeed39248d1b45f21d3895227925734cd6a6ae1d9d1901011085fe4c1" Feb 27 08:44:39 crc kubenswrapper[4906]: I0227 08:44:39.043824 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.052243 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw"] Feb 27 08:44:49 crc kubenswrapper[4906]: E0227 08:44:49.053027 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerName="pull" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.053040 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerName="pull" Feb 27 08:44:49 crc kubenswrapper[4906]: E0227 08:44:49.053055 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerName="extract" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.053062 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerName="extract" Feb 27 08:44:49 crc kubenswrapper[4906]: E0227 08:44:49.053083 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a861f5dc-100c-443f-ab72-ecfe71895998" containerName="console" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.053090 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a861f5dc-100c-443f-ab72-ecfe71895998" containerName="console" Feb 27 08:44:49 crc kubenswrapper[4906]: E0227 08:44:49.053108 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerName="util" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.053115 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerName="util" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.053265 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0d5365-d44e-4f3c-ac5e-93a61d9f4272" containerName="extract" Feb 
27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.053277 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a861f5dc-100c-443f-ab72-ecfe71895998" containerName="console" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.053773 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.055871 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.056152 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.056166 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-smf5m" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.056521 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.057400 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.080580 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw"] Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.212053 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97639e95-0dfe-4ed7-8702-f0de909b3c09-apiservice-cert\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.212131 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97639e95-0dfe-4ed7-8702-f0de909b3c09-webhook-cert\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.212171 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp25n\" (UniqueName: \"kubernetes.io/projected/97639e95-0dfe-4ed7-8702-f0de909b3c09-kube-api-access-mp25n\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.313220 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97639e95-0dfe-4ed7-8702-f0de909b3c09-webhook-cert\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.313721 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp25n\" (UniqueName: 
\"kubernetes.io/projected/97639e95-0dfe-4ed7-8702-f0de909b3c09-kube-api-access-mp25n\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.313898 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97639e95-0dfe-4ed7-8702-f0de909b3c09-apiservice-cert\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.321077 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/97639e95-0dfe-4ed7-8702-f0de909b3c09-apiservice-cert\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.335971 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp25n\" (UniqueName: \"kubernetes.io/projected/97639e95-0dfe-4ed7-8702-f0de909b3c09-kube-api-access-mp25n\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.336706 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/97639e95-0dfe-4ed7-8702-f0de909b3c09-webhook-cert\") pod \"metallb-operator-controller-manager-697697b9b5-v5fgw\" (UID: \"97639e95-0dfe-4ed7-8702-f0de909b3c09\") " pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.375312 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.402990 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c"] Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.408695 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.416373 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-42zc2" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.416687 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.416825 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.492419 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c"] Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.518631 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4vwt\" (UniqueName: \"kubernetes.io/projected/9db8c955-a52a-43c4-b61c-67e5ac2e3938-kube-api-access-x4vwt\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.518682 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9db8c955-a52a-43c4-b61c-67e5ac2e3938-apiservice-cert\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.518701 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9db8c955-a52a-43c4-b61c-67e5ac2e3938-webhook-cert\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.620346 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9db8c955-a52a-43c4-b61c-67e5ac2e3938-apiservice-cert\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.620416 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9db8c955-a52a-43c4-b61c-67e5ac2e3938-webhook-cert\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.620502 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4vwt\" (UniqueName: \"kubernetes.io/projected/9db8c955-a52a-43c4-b61c-67e5ac2e3938-kube-api-access-x4vwt\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.636318 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9db8c955-a52a-43c4-b61c-67e5ac2e3938-apiservice-cert\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.639209 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9db8c955-a52a-43c4-b61c-67e5ac2e3938-webhook-cert\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.661637 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4vwt\" (UniqueName: \"kubernetes.io/projected/9db8c955-a52a-43c4-b61c-67e5ac2e3938-kube-api-access-x4vwt\") pod \"metallb-operator-webhook-server-c5d4d9978-tst7c\" (UID: \"9db8c955-a52a-43c4-b61c-67e5ac2e3938\") " pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.721449 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw"] Feb 27 08:44:49 crc kubenswrapper[4906]: I0227 08:44:49.749275 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:50 crc kubenswrapper[4906]: I0227 08:44:50.015050 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c"] Feb 27 08:44:50 crc kubenswrapper[4906]: W0227 08:44:50.023639 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9db8c955_a52a_43c4_b61c_67e5ac2e3938.slice/crio-a71690b82aa8629cc3bdc8dabac7967bb2f0d618cda97bfec1466836a07b01f0 WatchSource:0}: Error finding container a71690b82aa8629cc3bdc8dabac7967bb2f0d618cda97bfec1466836a07b01f0: Status 404 returned error can't find the container with id a71690b82aa8629cc3bdc8dabac7967bb2f0d618cda97bfec1466836a07b01f0 Feb 27 08:44:50 crc kubenswrapper[4906]: I0227 08:44:50.116416 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" event={"ID":"9db8c955-a52a-43c4-b61c-67e5ac2e3938","Type":"ContainerStarted","Data":"a71690b82aa8629cc3bdc8dabac7967bb2f0d618cda97bfec1466836a07b01f0"} Feb 27 08:44:50 crc kubenswrapper[4906]: I0227 08:44:50.117727 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" event={"ID":"97639e95-0dfe-4ed7-8702-f0de909b3c09","Type":"ContainerStarted","Data":"d4c6c99ac175d735b4e93c4270e2559ea0a3fb86e659eb7e35c6b0cec9e9142d"} Feb 27 08:44:56 crc kubenswrapper[4906]: I0227 08:44:56.356700 4906 scope.go:117] "RemoveContainer" containerID="6f9afb2763e4e7b5ed12d8fe21c2b4622f5745e9f65938cfc3f24772508abbdd" Feb 27 08:44:57 crc kubenswrapper[4906]: I0227 08:44:57.170489 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" event={"ID":"97639e95-0dfe-4ed7-8702-f0de909b3c09","Type":"ContainerStarted","Data":"a9a0eba0169b256d9ccf7c7178357a56617cfd38a97eda0e80c63241e22b8e21"} Feb 27 
08:44:57 crc kubenswrapper[4906]: I0227 08:44:57.170616 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:44:57 crc kubenswrapper[4906]: I0227 08:44:57.172656 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" event={"ID":"9db8c955-a52a-43c4-b61c-67e5ac2e3938","Type":"ContainerStarted","Data":"3b27fd73a83ab6e85361e29ab2f391d709cbf7563c7c6de983b4545c32344a39"} Feb 27 08:44:57 crc kubenswrapper[4906]: I0227 08:44:57.172987 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:44:57 crc kubenswrapper[4906]: I0227 08:44:57.229328 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" podStartSLOduration=2.045527407 podStartE2EDuration="8.229305988s" podCreationTimestamp="2026-02-27 08:44:49 +0000 UTC" firstStartedPulling="2026-02-27 08:44:50.028420979 +0000 UTC m=+988.422822589" lastFinishedPulling="2026-02-27 08:44:56.21219956 +0000 UTC m=+994.606601170" observedRunningTime="2026-02-27 08:44:57.225634831 +0000 UTC m=+995.620036461" watchObservedRunningTime="2026-02-27 08:44:57.229305988 +0000 UTC m=+995.623707608" Feb 27 08:44:57 crc kubenswrapper[4906]: I0227 08:44:57.231454 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" podStartSLOduration=1.7821647760000001 podStartE2EDuration="8.231448434s" podCreationTimestamp="2026-02-27 08:44:49 +0000 UTC" firstStartedPulling="2026-02-27 08:44:49.742799232 +0000 UTC m=+988.137200842" lastFinishedPulling="2026-02-27 08:44:56.19208289 +0000 UTC m=+994.586484500" observedRunningTime="2026-02-27 08:44:57.203574099 +0000 UTC m=+995.597975729" watchObservedRunningTime="2026-02-27 08:44:57.231448434 +0000 UTC m=+995.625850064" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.164615 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv"] Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.168236 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.172982 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.173609 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.185545 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv"] Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.186599 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53ff7619-987e-456d-b6c4-5c1bc898787f-config-volume\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.186757 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxr5q\" (UniqueName: \"kubernetes.io/projected/53ff7619-987e-456d-b6c4-5c1bc898787f-kube-api-access-nxr5q\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.186904 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53ff7619-987e-456d-b6c4-5c1bc898787f-secret-volume\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.289011 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53ff7619-987e-456d-b6c4-5c1bc898787f-config-volume\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.289068 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxr5q\" (UniqueName: \"kubernetes.io/projected/53ff7619-987e-456d-b6c4-5c1bc898787f-kube-api-access-nxr5q\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.289118 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53ff7619-987e-456d-b6c4-5c1bc898787f-secret-volume\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.291173 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53ff7619-987e-456d-b6c4-5c1bc898787f-config-volume\") pod 
\"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.296026 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53ff7619-987e-456d-b6c4-5c1bc898787f-secret-volume\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.313475 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxr5q\" (UniqueName: \"kubernetes.io/projected/53ff7619-987e-456d-b6c4-5c1bc898787f-kube-api-access-nxr5q\") pod \"collect-profiles-29536365-gjgfv\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.489756 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:00 crc kubenswrapper[4906]: I0227 08:45:00.962759 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv"] Feb 27 08:45:01 crc kubenswrapper[4906]: I0227 08:45:01.205519 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" event={"ID":"53ff7619-987e-456d-b6c4-5c1bc898787f","Type":"ContainerStarted","Data":"1cd200aa62b95d1a739460aa5b14b2ed34037c71e6bff013a94ec7b6f3e05d65"} Feb 27 08:45:01 crc kubenswrapper[4906]: I0227 08:45:01.205586 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" event={"ID":"53ff7619-987e-456d-b6c4-5c1bc898787f","Type":"ContainerStarted","Data":"7e91c290bac0b9e7468aa54ceba74e89fe946f37eaa7228caeb5ed9760579446"} Feb 27 08:45:01 crc kubenswrapper[4906]: I0227 08:45:01.223977 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" podStartSLOduration=1.223951087 podStartE2EDuration="1.223951087s" podCreationTimestamp="2026-02-27 08:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:45:01.221378839 +0000 UTC m=+999.615780449" watchObservedRunningTime="2026-02-27 08:45:01.223951087 +0000 UTC m=+999.618352697" Feb 27 08:45:02 crc kubenswrapper[4906]: I0227 08:45:02.213040 4906 generic.go:334] "Generic (PLEG): container finished" podID="53ff7619-987e-456d-b6c4-5c1bc898787f" containerID="1cd200aa62b95d1a739460aa5b14b2ed34037c71e6bff013a94ec7b6f3e05d65" exitCode=0 Feb 27 08:45:02 crc kubenswrapper[4906]: I0227 08:45:02.213102 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" event={"ID":"53ff7619-987e-456d-b6c4-5c1bc898787f","Type":"ContainerDied","Data":"1cd200aa62b95d1a739460aa5b14b2ed34037c71e6bff013a94ec7b6f3e05d65"} Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.498846 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.541667 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53ff7619-987e-456d-b6c4-5c1bc898787f-secret-volume\") pod \"53ff7619-987e-456d-b6c4-5c1bc898787f\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.542126 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxr5q\" (UniqueName: \"kubernetes.io/projected/53ff7619-987e-456d-b6c4-5c1bc898787f-kube-api-access-nxr5q\") pod \"53ff7619-987e-456d-b6c4-5c1bc898787f\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.542201 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53ff7619-987e-456d-b6c4-5c1bc898787f-config-volume\") pod \"53ff7619-987e-456d-b6c4-5c1bc898787f\" (UID: \"53ff7619-987e-456d-b6c4-5c1bc898787f\") " Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.543243 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53ff7619-987e-456d-b6c4-5c1bc898787f-config-volume" (OuterVolumeSpecName: "config-volume") pod "53ff7619-987e-456d-b6c4-5c1bc898787f" (UID: "53ff7619-987e-456d-b6c4-5c1bc898787f"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.548777 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53ff7619-987e-456d-b6c4-5c1bc898787f-kube-api-access-nxr5q" (OuterVolumeSpecName: "kube-api-access-nxr5q") pod "53ff7619-987e-456d-b6c4-5c1bc898787f" (UID: "53ff7619-987e-456d-b6c4-5c1bc898787f"). InnerVolumeSpecName "kube-api-access-nxr5q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.548925 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53ff7619-987e-456d-b6c4-5c1bc898787f-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "53ff7619-987e-456d-b6c4-5c1bc898787f" (UID: "53ff7619-987e-456d-b6c4-5c1bc898787f"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.643551 4906 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53ff7619-987e-456d-b6c4-5c1bc898787f-config-volume\") on node \"crc\" DevicePath \"\"" Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.643589 4906 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53ff7619-987e-456d-b6c4-5c1bc898787f-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 27 08:45:03 crc kubenswrapper[4906]: I0227 08:45:03.643601 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxr5q\" (UniqueName: \"kubernetes.io/projected/53ff7619-987e-456d-b6c4-5c1bc898787f-kube-api-access-nxr5q\") on node \"crc\" DevicePath \"\"" Feb 27 08:45:04 crc kubenswrapper[4906]: I0227 08:45:04.228236 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" event={"ID":"53ff7619-987e-456d-b6c4-5c1bc898787f","Type":"ContainerDied","Data":"7e91c290bac0b9e7468aa54ceba74e89fe946f37eaa7228caeb5ed9760579446"} Feb 27 08:45:04 crc kubenswrapper[4906]: I0227 08:45:04.228282 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e91c290bac0b9e7468aa54ceba74e89fe946f37eaa7228caeb5ed9760579446" Feb 27 08:45:04 crc kubenswrapper[4906]: I0227 08:45:04.228299 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536365-gjgfv" Feb 27 08:45:09 crc kubenswrapper[4906]: I0227 08:45:09.754661 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-c5d4d9978-tst7c" Feb 27 08:45:29 crc kubenswrapper[4906]: I0227 08:45:29.378763 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-697697b9b5-v5fgw" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.260329 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-zws26"] Feb 27 08:45:30 crc kubenswrapper[4906]: E0227 08:45:30.260910 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53ff7619-987e-456d-b6c4-5c1bc898787f" containerName="collect-profiles" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.260929 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="53ff7619-987e-456d-b6c4-5c1bc898787f" containerName="collect-profiles" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.261036 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="53ff7619-987e-456d-b6c4-5c1bc898787f" containerName="collect-profiles" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.263141 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.265594 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.265671 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-lq24w" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.265719 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.290029 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn"] Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.290897 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.292938 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.307488 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn"] Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.379472 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-nqccr"] Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.380621 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.382753 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.382765 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.382985 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-bk8r9" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.383164 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.401993 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-86ddb6bd46-qnp6d"] Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.403008 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.405173 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416047 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416129 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ad499997-74c4-4c13-a9f3-1dec95a2a087-cert\") pod \"frr-k8s-webhook-server-7f989f654f-p2vgn\" (UID: \"ad499997-74c4-4c13-a9f3-1dec95a2a087\") " pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416179 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxnqq\" (UniqueName: \"kubernetes.io/projected/ad499997-74c4-4c13-a9f3-1dec95a2a087-kube-api-access-hxnqq\") pod \"frr-k8s-webhook-server-7f989f654f-p2vgn\" (UID: \"ad499997-74c4-4c13-a9f3-1dec95a2a087\") " pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416211 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmlt8\" (UniqueName: \"kubernetes.io/projected/9c195ff8-f64c-4827-8826-dde8f2583e40-kube-api-access-vmlt8\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416243 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-reloader\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416277 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-startup\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416300 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-sockets\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416325 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-conf\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.416359 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics-certs\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.432834 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-86ddb6bd46-qnp6d"] Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517204 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517297 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkhql\" (UniqueName: \"kubernetes.io/projected/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-kube-api-access-jkhql\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517331 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-metrics-certs\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517361 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-cert\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517387 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ad499997-74c4-4c13-a9f3-1dec95a2a087-cert\") pod \"frr-k8s-webhook-server-7f989f654f-p2vgn\" (UID: \"ad499997-74c4-4c13-a9f3-1dec95a2a087\") " pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517420 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-metallb-excludel2\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517448 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jthrg\" (UniqueName: \"kubernetes.io/projected/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-kube-api-access-jthrg\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517480 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxnqq\" (UniqueName: \"kubernetes.io/projected/ad499997-74c4-4c13-a9f3-1dec95a2a087-kube-api-access-hxnqq\") pod \"frr-k8s-webhook-server-7f989f654f-p2vgn\" (UID: \"ad499997-74c4-4c13-a9f3-1dec95a2a087\") " pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 
08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517507 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmlt8\" (UniqueName: \"kubernetes.io/projected/9c195ff8-f64c-4827-8826-dde8f2583e40-kube-api-access-vmlt8\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517539 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-reloader\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517567 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517599 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-startup\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517624 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-sockets\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517644 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-conf\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517670 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics-certs\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517823 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-metrics-certs\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.517853 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: E0227 08:45:30.517925 4906 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.518000 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-reloader\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: E0227 08:45:30.518113 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics-certs podName:9c195ff8-f64c-4827-8826-dde8f2583e40 nodeName:}" failed. No retries permitted until 2026-02-27 08:45:31.018046591 +0000 UTC m=+1029.412448201 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics-certs") pod "frr-k8s-zws26" (UID: "9c195ff8-f64c-4827-8826-dde8f2583e40") : secret "frr-k8s-certs-secret" not found Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.518172 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-sockets\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.518406 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-conf\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.519671 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/9c195ff8-f64c-4827-8826-dde8f2583e40-frr-startup\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.538694 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ad499997-74c4-4c13-a9f3-1dec95a2a087-cert\") pod \"frr-k8s-webhook-server-7f989f654f-p2vgn\" (UID: \"ad499997-74c4-4c13-a9f3-1dec95a2a087\") " pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.539103 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxnqq\" (UniqueName: \"kubernetes.io/projected/ad499997-74c4-4c13-a9f3-1dec95a2a087-kube-api-access-hxnqq\") pod \"frr-k8s-webhook-server-7f989f654f-p2vgn\" (UID: \"ad499997-74c4-4c13-a9f3-1dec95a2a087\") " pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.540599 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmlt8\" (UniqueName: \"kubernetes.io/projected/9c195ff8-f64c-4827-8826-dde8f2583e40-kube-api-access-vmlt8\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.605282 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.619097 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jthrg\" (UniqueName: \"kubernetes.io/projected/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-kube-api-access-jthrg\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.619168 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.619217 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-metrics-certs\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.619284 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkhql\" (UniqueName: \"kubernetes.io/projected/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-kube-api-access-jkhql\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.619313 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-metrics-certs\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.619340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-cert\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.619369 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-metallb-excludel2\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: E0227 08:45:30.619365 4906 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 27 08:45:30 crc kubenswrapper[4906]: E0227 08:45:30.619464 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist podName:e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf nodeName:}" failed. No retries permitted until 2026-02-27 08:45:31.119438003 +0000 UTC m=+1029.513839803 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist") pod "speaker-nqccr" (UID: "e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf") : secret "metallb-memberlist" not found Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.620472 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-metallb-excludel2\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.622485 4906 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.623147 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-metrics-certs\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.623872 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-metrics-certs\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.634149 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-cert\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.644646 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jthrg\" (UniqueName: \"kubernetes.io/projected/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-kube-api-access-jthrg\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.649846 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkhql\" (UniqueName: \"kubernetes.io/projected/a1f334d4-49ed-424f-9f86-6cc0ccfccca9-kube-api-access-jkhql\") pod \"controller-86ddb6bd46-qnp6d\" (UID: \"a1f334d4-49ed-424f-9f86-6cc0ccfccca9\") " pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:30 crc kubenswrapper[4906]: I0227 08:45:30.718158 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.029322 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics-certs\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.032705 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/9c195ff8-f64c-4827-8826-dde8f2583e40-metrics-certs\") pod \"frr-k8s-zws26\" (UID: \"9c195ff8-f64c-4827-8826-dde8f2583e40\") " pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.090592 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn"] Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.131110 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:31 crc kubenswrapper[4906]: E0227 08:45:31.131366 4906 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Feb 27 08:45:31 crc kubenswrapper[4906]: E0227 08:45:31.131441 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist podName:e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf nodeName:}" failed. No retries permitted until 2026-02-27 08:45:32.131421618 +0000 UTC m=+1030.525823238 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist") pod "speaker-nqccr" (UID: "e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf") : secret "metallb-memberlist" not found Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.162625 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-86ddb6bd46-qnp6d"] Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.183706 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.424606 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-86ddb6bd46-qnp6d" event={"ID":"a1f334d4-49ed-424f-9f86-6cc0ccfccca9","Type":"ContainerStarted","Data":"9071de48899d4870e73e48fd809e62755ebd59c231ec0749bf721e4380551f6c"} Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.425124 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-86ddb6bd46-qnp6d" event={"ID":"a1f334d4-49ed-424f-9f86-6cc0ccfccca9","Type":"ContainerStarted","Data":"df5a538df269e7d4ebe8191cc93bf6be1ec96ca482943aba5b4b6343c64ff45a"} Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.427948 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerStarted","Data":"c40446944f55ff097b969698f203b83ba5e6c2d770fc6d116f59f3af233c9c7d"} Feb 27 08:45:31 crc kubenswrapper[4906]: I0227 08:45:31.429204 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" event={"ID":"ad499997-74c4-4c13-a9f3-1dec95a2a087","Type":"ContainerStarted","Data":"1ab8a560ec87ab42f2e827080961eac6a338264a0e0198fd131c3725c716ffcd"} Feb 27 08:45:32 crc kubenswrapper[4906]: I0227 08:45:32.147037 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:32 crc kubenswrapper[4906]: I0227 08:45:32.155045 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf-memberlist\") pod \"speaker-nqccr\" (UID: \"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf\") " pod="metallb-system/speaker-nqccr" Feb 27 08:45:32 crc kubenswrapper[4906]: I0227 08:45:32.195395 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-nqccr" Feb 27 08:45:32 crc kubenswrapper[4906]: W0227 08:45:32.221732 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1e76a6c_9ad0_41cd_9ed3_a1232c9674cf.slice/crio-07e721558f4087e1df4155db03747fcc305b93153d99a0e9c0ecad2bbd6bff1d WatchSource:0}: Error finding container 07e721558f4087e1df4155db03747fcc305b93153d99a0e9c0ecad2bbd6bff1d: Status 404 returned error can't find the container with id 07e721558f4087e1df4155db03747fcc305b93153d99a0e9c0ecad2bbd6bff1d Feb 27 08:45:32 crc kubenswrapper[4906]: I0227 08:45:32.442645 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-86ddb6bd46-qnp6d" event={"ID":"a1f334d4-49ed-424f-9f86-6cc0ccfccca9","Type":"ContainerStarted","Data":"50245f602714132dcae2dcad4a7c30d0dfe10256e8817197db7c1073ff993d7f"} Feb 27 08:45:32 crc kubenswrapper[4906]: I0227 08:45:32.442800 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:32 crc kubenswrapper[4906]: I0227 08:45:32.447255 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nqccr" event={"ID":"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf","Type":"ContainerStarted","Data":"07e721558f4087e1df4155db03747fcc305b93153d99a0e9c0ecad2bbd6bff1d"} Feb 27 08:45:32 crc kubenswrapper[4906]: I0227 08:45:32.574862 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-86ddb6bd46-qnp6d" podStartSLOduration=2.574827103 podStartE2EDuration="2.574827103s" podCreationTimestamp="2026-02-27 08:45:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:45:32.462285177 +0000 UTC m=+1030.856686777" watchObservedRunningTime="2026-02-27 08:45:32.574827103 +0000 UTC m=+1030.969228713" Feb 27 08:45:33 crc kubenswrapper[4906]: I0227 08:45:33.463877 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nqccr" event={"ID":"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf","Type":"ContainerStarted","Data":"377187543b122ea9110781e6931a39564aeb840d3f778ea5e3d6bf3d467083f5"} Feb 27 08:45:33 crc kubenswrapper[4906]: I0227 08:45:33.464430 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-nqccr" event={"ID":"e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf","Type":"ContainerStarted","Data":"ff9c6972ea961f4e0c0f9afc120bc3118cb20fbc2028a70c82e86b340d4f5c6c"} Feb 27 08:45:33 crc kubenswrapper[4906]: I0227 08:45:33.498024 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-nqccr" podStartSLOduration=3.497999835 podStartE2EDuration="3.497999835s" podCreationTimestamp="2026-02-27 08:45:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:45:33.49665411 +0000 UTC m=+1031.891055720" watchObservedRunningTime="2026-02-27 08:45:33.497999835 +0000 UTC m=+1031.892401445" Feb 27 08:45:34 crc kubenswrapper[4906]: I0227 08:45:34.489651 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-nqccr" Feb 27 08:45:39 crc kubenswrapper[4906]: I0227 08:45:39.529353 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" 
event={"ID":"ad499997-74c4-4c13-a9f3-1dec95a2a087","Type":"ContainerStarted","Data":"c2ad0b52589a5fe6994fcbbf21dc424f07e42a6adaa21d07669c8e3032f3bf0a"} Feb 27 08:45:39 crc kubenswrapper[4906]: I0227 08:45:39.531639 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:39 crc kubenswrapper[4906]: I0227 08:45:39.533368 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c195ff8-f64c-4827-8826-dde8f2583e40" containerID="c915c6822a9fcca503c534993274e48dc91246c5e5a1eec7f80d5f40cdd8986d" exitCode=0 Feb 27 08:45:39 crc kubenswrapper[4906]: I0227 08:45:39.533426 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerDied","Data":"c915c6822a9fcca503c534993274e48dc91246c5e5a1eec7f80d5f40cdd8986d"} Feb 27 08:45:39 crc kubenswrapper[4906]: I0227 08:45:39.569179 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" podStartSLOduration=1.790326152 podStartE2EDuration="9.569149678s" podCreationTimestamp="2026-02-27 08:45:30 +0000 UTC" firstStartedPulling="2026-02-27 08:45:31.099220929 +0000 UTC m=+1029.493622539" lastFinishedPulling="2026-02-27 08:45:38.878044455 +0000 UTC m=+1037.272446065" observedRunningTime="2026-02-27 08:45:39.546471761 +0000 UTC m=+1037.940873371" watchObservedRunningTime="2026-02-27 08:45:39.569149678 +0000 UTC m=+1037.963551288" Feb 27 08:45:40 crc kubenswrapper[4906]: I0227 08:45:40.543721 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c195ff8-f64c-4827-8826-dde8f2583e40" containerID="629dc6121225d362417ae478ed7db4a7f2b105d7358b037abd38aecdc3009517" exitCode=0 Feb 27 08:45:40 crc kubenswrapper[4906]: I0227 08:45:40.543809 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerDied","Data":"629dc6121225d362417ae478ed7db4a7f2b105d7358b037abd38aecdc3009517"} Feb 27 08:45:41 crc kubenswrapper[4906]: I0227 08:45:41.554258 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c195ff8-f64c-4827-8826-dde8f2583e40" containerID="e09dc98ed9a1d3a09c48614e55df50a6ea857af26f87e99c8e18d58d7024dcf2" exitCode=0 Feb 27 08:45:41 crc kubenswrapper[4906]: I0227 08:45:41.554458 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerDied","Data":"e09dc98ed9a1d3a09c48614e55df50a6ea857af26f87e99c8e18d58d7024dcf2"} Feb 27 08:45:42 crc kubenswrapper[4906]: I0227 08:45:42.200275 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-nqccr" Feb 27 08:45:42 crc kubenswrapper[4906]: I0227 08:45:42.574943 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerStarted","Data":"4b5df845b2c00fa3343295b6227981744105fef7e40d49edbdea542abcec53e6"} Feb 27 08:45:42 crc kubenswrapper[4906]: I0227 08:45:42.575532 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerStarted","Data":"f56ee3f890a562d74de8eff9b61a071d2c2d0798fd84fd5bf6d2b58a1ee179cc"} Feb 27 08:45:42 crc kubenswrapper[4906]: I0227 08:45:42.575548 4906 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerStarted","Data":"0b34572a25bc80bdeb836a1ee8967f34a6f6eefbdfb0cec61f6c7467c1c9bd6a"} Feb 27 08:45:42 crc kubenswrapper[4906]: I0227 08:45:42.575562 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerStarted","Data":"ab1eabd5d85009cef578adc6a6974ab6aca1d0c3c16ce36b3cf0a5fbfcc5a90a"} Feb 27 08:45:42 crc kubenswrapper[4906]: I0227 08:45:42.575574 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerStarted","Data":"f478956c2e2de3e63033244dd8a7f7aa734f25d0749f94aa35035fd507da88fe"} Feb 27 08:45:43 crc kubenswrapper[4906]: I0227 08:45:43.588651 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-zws26" event={"ID":"9c195ff8-f64c-4827-8826-dde8f2583e40","Type":"ContainerStarted","Data":"c6971e0fb9b6a780a08ede575fe63c7c31e57eec0e5bd5571f642b2f41acb4cc"} Feb 27 08:45:43 crc kubenswrapper[4906]: I0227 08:45:43.590043 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:43 crc kubenswrapper[4906]: I0227 08:45:43.619808 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-zws26" podStartSLOduration=6.078576274 podStartE2EDuration="13.619777628s" podCreationTimestamp="2026-02-27 08:45:30 +0000 UTC" firstStartedPulling="2026-02-27 08:45:31.305466156 +0000 UTC m=+1029.699867766" lastFinishedPulling="2026-02-27 08:45:38.84666751 +0000 UTC m=+1037.241069120" observedRunningTime="2026-02-27 08:45:43.614626043 +0000 UTC m=+1042.009027673" watchObservedRunningTime="2026-02-27 08:45:43.619777628 +0000 UTC m=+1042.014179258" Feb 27 08:45:44 crc kubenswrapper[4906]: I0227 08:45:44.896462 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-cb5vd"] Feb 27 08:45:44 crc kubenswrapper[4906]: I0227 08:45:44.898734 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cb5vd" Feb 27 08:45:44 crc kubenswrapper[4906]: I0227 08:45:44.901675 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Feb 27 08:45:44 crc kubenswrapper[4906]: I0227 08:45:44.902045 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Feb 27 08:45:44 crc kubenswrapper[4906]: I0227 08:45:44.908756 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-pkhsg" Feb 27 08:45:44 crc kubenswrapper[4906]: I0227 08:45:44.910265 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cb5vd"] Feb 27 08:45:44 crc kubenswrapper[4906]: I0227 08:45:44.969933 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djhvf\" (UniqueName: \"kubernetes.io/projected/72309fd3-e555-4817-ae9d-cce503cf41b8-kube-api-access-djhvf\") pod \"openstack-operator-index-cb5vd\" (UID: \"72309fd3-e555-4817-ae9d-cce503cf41b8\") " pod="openstack-operators/openstack-operator-index-cb5vd" Feb 27 08:45:45 crc kubenswrapper[4906]: I0227 08:45:45.072138 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djhvf\" (UniqueName: \"kubernetes.io/projected/72309fd3-e555-4817-ae9d-cce503cf41b8-kube-api-access-djhvf\") pod \"openstack-operator-index-cb5vd\" (UID: \"72309fd3-e555-4817-ae9d-cce503cf41b8\") " pod="openstack-operators/openstack-operator-index-cb5vd" Feb 27 08:45:45 crc kubenswrapper[4906]: I0227 08:45:45.102322 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djhvf\" (UniqueName: \"kubernetes.io/projected/72309fd3-e555-4817-ae9d-cce503cf41b8-kube-api-access-djhvf\") pod \"openstack-operator-index-cb5vd\" (UID: \"72309fd3-e555-4817-ae9d-cce503cf41b8\") " pod="openstack-operators/openstack-operator-index-cb5vd" Feb 27 08:45:45 crc kubenswrapper[4906]: I0227 08:45:45.219179 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cb5vd" Feb 27 08:45:45 crc kubenswrapper[4906]: I0227 08:45:45.651599 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-cb5vd"] Feb 27 08:45:45 crc kubenswrapper[4906]: W0227 08:45:45.661093 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72309fd3_e555_4817_ae9d_cce503cf41b8.slice/crio-521051c19b9c679a9585de75f96e6a7097e60f22388c2a073a215b90ee9114ed WatchSource:0}: Error finding container 521051c19b9c679a9585de75f96e6a7097e60f22388c2a073a215b90ee9114ed: Status 404 returned error can't find the container with id 521051c19b9c679a9585de75f96e6a7097e60f22388c2a073a215b90ee9114ed Feb 27 08:45:46 crc kubenswrapper[4906]: I0227 08:45:46.184641 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:46 crc kubenswrapper[4906]: I0227 08:45:46.223810 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:46 crc kubenswrapper[4906]: I0227 08:45:46.616686 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cb5vd" event={"ID":"72309fd3-e555-4817-ae9d-cce503cf41b8","Type":"ContainerStarted","Data":"521051c19b9c679a9585de75f96e6a7097e60f22388c2a073a215b90ee9114ed"} Feb 27 08:45:48 crc kubenswrapper[4906]: I0227 08:45:48.260072 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-cb5vd"] Feb 27 08:45:48 crc kubenswrapper[4906]: I0227 08:45:48.877582 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-bm4k8"] Feb 27 08:45:48 crc kubenswrapper[4906]: I0227 08:45:48.878669 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:48 crc kubenswrapper[4906]: I0227 08:45:48.904113 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-bm4k8"] Feb 27 08:45:48 crc kubenswrapper[4906]: I0227 08:45:48.928528 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrbzv\" (UniqueName: \"kubernetes.io/projected/f5ec0807-fc16-427f-99be-2cc63d0ceb23-kube-api-access-nrbzv\") pod \"openstack-operator-index-bm4k8\" (UID: \"f5ec0807-fc16-427f-99be-2cc63d0ceb23\") " pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:49 crc kubenswrapper[4906]: I0227 08:45:49.029550 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrbzv\" (UniqueName: \"kubernetes.io/projected/f5ec0807-fc16-427f-99be-2cc63d0ceb23-kube-api-access-nrbzv\") pod \"openstack-operator-index-bm4k8\" (UID: \"f5ec0807-fc16-427f-99be-2cc63d0ceb23\") " pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:49 crc kubenswrapper[4906]: I0227 08:45:49.052913 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nrbzv\" (UniqueName: \"kubernetes.io/projected/f5ec0807-fc16-427f-99be-2cc63d0ceb23-kube-api-access-nrbzv\") pod \"openstack-operator-index-bm4k8\" (UID: \"f5ec0807-fc16-427f-99be-2cc63d0ceb23\") " pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:49 crc kubenswrapper[4906]: I0227 08:45:49.203850 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:49 crc kubenswrapper[4906]: I0227 08:45:49.934925 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-bm4k8"] Feb 27 08:45:50 crc kubenswrapper[4906]: I0227 08:45:50.611832 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7f989f654f-p2vgn" Feb 27 08:45:50 crc kubenswrapper[4906]: I0227 08:45:50.655266 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-bm4k8" event={"ID":"f5ec0807-fc16-427f-99be-2cc63d0ceb23","Type":"ContainerStarted","Data":"95e39e6884f36d1b7aabb3c2e3888731ea65f0936338e6f427b0fbac4ffd19c7"} Feb 27 08:45:50 crc kubenswrapper[4906]: I0227 08:45:50.723624 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-86ddb6bd46-qnp6d" Feb 27 08:45:51 crc kubenswrapper[4906]: I0227 08:45:51.187616 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-zws26" Feb 27 08:45:51 crc kubenswrapper[4906]: I0227 08:45:51.667544 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cb5vd" event={"ID":"72309fd3-e555-4817-ae9d-cce503cf41b8","Type":"ContainerStarted","Data":"0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea"} Feb 27 08:45:51 crc kubenswrapper[4906]: I0227 08:45:51.667722 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-cb5vd" podUID="72309fd3-e555-4817-ae9d-cce503cf41b8" containerName="registry-server" containerID="cri-o://0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea" gracePeriod=2 Feb 27 08:45:51 crc kubenswrapper[4906]: I0227 08:45:51.669557 4906 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-bm4k8" event={"ID":"f5ec0807-fc16-427f-99be-2cc63d0ceb23","Type":"ContainerStarted","Data":"da8d9f897e3f6d6fc5031aa1836cbdcfb1cd38eba0c406238d4f81c67707c052"} Feb 27 08:45:51 crc kubenswrapper[4906]: I0227 08:45:51.690766 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-cb5vd" podStartSLOduration=2.485617264 podStartE2EDuration="7.690686299s" podCreationTimestamp="2026-02-27 08:45:44 +0000 UTC" firstStartedPulling="2026-02-27 08:45:45.663465485 +0000 UTC m=+1044.057867115" lastFinishedPulling="2026-02-27 08:45:50.86853454 +0000 UTC m=+1049.262936150" observedRunningTime="2026-02-27 08:45:51.690196436 +0000 UTC m=+1050.084598066" watchObservedRunningTime="2026-02-27 08:45:51.690686299 +0000 UTC m=+1050.085087909" Feb 27 08:45:51 crc kubenswrapper[4906]: I0227 08:45:51.714971 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-bm4k8" podStartSLOduration=3.020449846 podStartE2EDuration="3.714942447s" podCreationTimestamp="2026-02-27 08:45:48 +0000 UTC" firstStartedPulling="2026-02-27 08:45:50.172629312 +0000 UTC m=+1048.567030912" lastFinishedPulling="2026-02-27 08:45:50.867121903 +0000 UTC m=+1049.261523513" observedRunningTime="2026-02-27 08:45:51.710479039 +0000 UTC m=+1050.104880649" watchObservedRunningTime="2026-02-27 08:45:51.714942447 +0000 UTC m=+1050.109344057" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.071825 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-cb5vd" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.207249 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-djhvf\" (UniqueName: \"kubernetes.io/projected/72309fd3-e555-4817-ae9d-cce503cf41b8-kube-api-access-djhvf\") pod \"72309fd3-e555-4817-ae9d-cce503cf41b8\" (UID: \"72309fd3-e555-4817-ae9d-cce503cf41b8\") " Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.214972 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72309fd3-e555-4817-ae9d-cce503cf41b8-kube-api-access-djhvf" (OuterVolumeSpecName: "kube-api-access-djhvf") pod "72309fd3-e555-4817-ae9d-cce503cf41b8" (UID: "72309fd3-e555-4817-ae9d-cce503cf41b8"). InnerVolumeSpecName "kube-api-access-djhvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.309401 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-djhvf\" (UniqueName: \"kubernetes.io/projected/72309fd3-e555-4817-ae9d-cce503cf41b8-kube-api-access-djhvf\") on node \"crc\" DevicePath \"\"" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.678406 4906 generic.go:334] "Generic (PLEG): container finished" podID="72309fd3-e555-4817-ae9d-cce503cf41b8" containerID="0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea" exitCode=0 Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.678474 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-cb5vd" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.678512 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cb5vd" event={"ID":"72309fd3-e555-4817-ae9d-cce503cf41b8","Type":"ContainerDied","Data":"0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea"} Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.678915 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-cb5vd" event={"ID":"72309fd3-e555-4817-ae9d-cce503cf41b8","Type":"ContainerDied","Data":"521051c19b9c679a9585de75f96e6a7097e60f22388c2a073a215b90ee9114ed"} Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.678946 4906 scope.go:117] "RemoveContainer" containerID="0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.701501 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-cb5vd"] Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.703786 4906 scope.go:117] "RemoveContainer" containerID="0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea" Feb 27 08:45:52 crc kubenswrapper[4906]: E0227 08:45:52.704384 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea\": container with ID starting with 0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea not found: ID does not exist" containerID="0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.704444 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea"} err="failed to get container status \"0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea\": rpc error: code = NotFound desc = could not find container \"0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea\": container with ID starting with 0512fe903a82ce0227f8f154244ff1d624ac68e57c8e265cd22c4a2a1ba35bea not found: ID does not exist" Feb 27 08:45:52 crc kubenswrapper[4906]: I0227 08:45:52.707600 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-cb5vd"] Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.670236 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vswv9"] Feb 27 08:45:53 crc kubenswrapper[4906]: E0227 08:45:53.670617 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72309fd3-e555-4817-ae9d-cce503cf41b8" containerName="registry-server" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.670640 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="72309fd3-e555-4817-ae9d-cce503cf41b8" containerName="registry-server" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.670826 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="72309fd3-e555-4817-ae9d-cce503cf41b8" containerName="registry-server" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.672058 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.689757 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vswv9"] Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.729035 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-utilities\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.729161 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbdnp\" (UniqueName: \"kubernetes.io/projected/ab6f1c2b-b0be-4954-bee9-3a24d794f919-kube-api-access-vbdnp\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.729210 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-catalog-content\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.831099 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbdnp\" (UniqueName: \"kubernetes.io/projected/ab6f1c2b-b0be-4954-bee9-3a24d794f919-kube-api-access-vbdnp\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.831166 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-catalog-content\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.831212 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-utilities\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.831764 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-utilities\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.832067 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-catalog-content\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.854803 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vbdnp\" (UniqueName: \"kubernetes.io/projected/ab6f1c2b-b0be-4954-bee9-3a24d794f919-kube-api-access-vbdnp\") pod \"community-operators-vswv9\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:53 crc kubenswrapper[4906]: I0227 08:45:53.996610 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:45:54 crc kubenswrapper[4906]: I0227 08:45:54.369369 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vswv9"] Feb 27 08:45:54 crc kubenswrapper[4906]: W0227 08:45:54.370397 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podab6f1c2b_b0be_4954_bee9_3a24d794f919.slice/crio-225d8ef0cfa852fe673ee94e38d1654de2e8cb61142594acc25b7f90edb0c8e9 WatchSource:0}: Error finding container 225d8ef0cfa852fe673ee94e38d1654de2e8cb61142594acc25b7f90edb0c8e9: Status 404 returned error can't find the container with id 225d8ef0cfa852fe673ee94e38d1654de2e8cb61142594acc25b7f90edb0c8e9 Feb 27 08:45:54 crc kubenswrapper[4906]: I0227 08:45:54.560401 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72309fd3-e555-4817-ae9d-cce503cf41b8" path="/var/lib/kubelet/pods/72309fd3-e555-4817-ae9d-cce503cf41b8/volumes" Feb 27 08:45:54 crc kubenswrapper[4906]: I0227 08:45:54.717981 4906 generic.go:334] "Generic (PLEG): container finished" podID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerID="37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d" exitCode=0 Feb 27 08:45:54 crc kubenswrapper[4906]: I0227 08:45:54.718078 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vswv9" event={"ID":"ab6f1c2b-b0be-4954-bee9-3a24d794f919","Type":"ContainerDied","Data":"37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d"} Feb 27 08:45:54 crc kubenswrapper[4906]: I0227 08:45:54.718430 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vswv9" event={"ID":"ab6f1c2b-b0be-4954-bee9-3a24d794f919","Type":"ContainerStarted","Data":"225d8ef0cfa852fe673ee94e38d1654de2e8cb61142594acc25b7f90edb0c8e9"} Feb 27 08:45:55 crc kubenswrapper[4906]: I0227 08:45:55.731055 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vswv9" event={"ID":"ab6f1c2b-b0be-4954-bee9-3a24d794f919","Type":"ContainerStarted","Data":"0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534"} Feb 27 08:45:56 crc kubenswrapper[4906]: I0227 08:45:56.740789 4906 generic.go:334] "Generic (PLEG): container finished" podID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerID="0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534" exitCode=0 Feb 27 08:45:56 crc kubenswrapper[4906]: I0227 08:45:56.740852 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vswv9" event={"ID":"ab6f1c2b-b0be-4954-bee9-3a24d794f919","Type":"ContainerDied","Data":"0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534"} Feb 27 08:45:57 crc kubenswrapper[4906]: I0227 08:45:57.752710 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vswv9" 
event={"ID":"ab6f1c2b-b0be-4954-bee9-3a24d794f919","Type":"ContainerStarted","Data":"ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3"} Feb 27 08:45:57 crc kubenswrapper[4906]: I0227 08:45:57.779271 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vswv9" podStartSLOduration=2.294194143 podStartE2EDuration="4.779242167s" podCreationTimestamp="2026-02-27 08:45:53 +0000 UTC" firstStartedPulling="2026-02-27 08:45:54.720777865 +0000 UTC m=+1053.115179485" lastFinishedPulling="2026-02-27 08:45:57.205825889 +0000 UTC m=+1055.600227509" observedRunningTime="2026-02-27 08:45:57.776616518 +0000 UTC m=+1056.171018168" watchObservedRunningTime="2026-02-27 08:45:57.779242167 +0000 UTC m=+1056.173643797" Feb 27 08:45:59 crc kubenswrapper[4906]: I0227 08:45:59.204487 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:59 crc kubenswrapper[4906]: I0227 08:45:59.205968 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:59 crc kubenswrapper[4906]: I0227 08:45:59.245002 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:45:59 crc kubenswrapper[4906]: I0227 08:45:59.801715 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-bm4k8" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.145106 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536366-bgvjq"] Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.146141 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536366-bgvjq" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.149588 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.149787 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.150295 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.162855 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536366-bgvjq"] Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.233417 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx6gc\" (UniqueName: \"kubernetes.io/projected/f4c4a66a-849a-4920-b332-4b6dacbec98b-kube-api-access-jx6gc\") pod \"auto-csr-approver-29536366-bgvjq\" (UID: \"f4c4a66a-849a-4920-b332-4b6dacbec98b\") " pod="openshift-infra/auto-csr-approver-29536366-bgvjq" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.334480 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx6gc\" (UniqueName: \"kubernetes.io/projected/f4c4a66a-849a-4920-b332-4b6dacbec98b-kube-api-access-jx6gc\") pod \"auto-csr-approver-29536366-bgvjq\" (UID: \"f4c4a66a-849a-4920-b332-4b6dacbec98b\") " pod="openshift-infra/auto-csr-approver-29536366-bgvjq" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.364515 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx6gc\" (UniqueName: \"kubernetes.io/projected/f4c4a66a-849a-4920-b332-4b6dacbec98b-kube-api-access-jx6gc\") pod \"auto-csr-approver-29536366-bgvjq\" (UID: \"f4c4a66a-849a-4920-b332-4b6dacbec98b\") " pod="openshift-infra/auto-csr-approver-29536366-bgvjq" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.474257 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536366-bgvjq" Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.739164 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536366-bgvjq"] Feb 27 08:46:00 crc kubenswrapper[4906]: I0227 08:46:00.774778 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536366-bgvjq" event={"ID":"f4c4a66a-849a-4920-b332-4b6dacbec98b","Type":"ContainerStarted","Data":"1c3ddcce286cf2bf2c58ee0c0f581c9e08cc0153bc7cfcf06a953c0b6dcfb080"} Feb 27 08:46:01 crc kubenswrapper[4906]: I0227 08:46:01.937995 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n"] Feb 27 08:46:01 crc kubenswrapper[4906]: I0227 08:46:01.940764 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:01 crc kubenswrapper[4906]: I0227 08:46:01.943560 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-5lhjr" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:01.999016 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n"] Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.061671 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq6pt\" (UniqueName: \"kubernetes.io/projected/bcec6a95-1051-45a5-9dc0-156dff89a709-kube-api-access-jq6pt\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.061769 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-util\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.061853 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-bundle\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.164124 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-util\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.164215 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-bundle\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.164310 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq6pt\" (UniqueName: \"kubernetes.io/projected/bcec6a95-1051-45a5-9dc0-156dff89a709-kube-api-access-jq6pt\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.164897 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-util\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.165022 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-bundle\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.187446 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq6pt\" (UniqueName: \"kubernetes.io/projected/bcec6a95-1051-45a5-9dc0-156dff89a709-kube-api-access-jq6pt\") pod \"599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.259525 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.519678 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n"] Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.790985 4906 generic.go:334] "Generic (PLEG): container finished" podID="f4c4a66a-849a-4920-b332-4b6dacbec98b" containerID="d74bf3d5cb0decd432277184b904ec73a48744ba9f974b92c711385bc91ced30" exitCode=0 Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.791174 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536366-bgvjq" event={"ID":"f4c4a66a-849a-4920-b332-4b6dacbec98b","Type":"ContainerDied","Data":"d74bf3d5cb0decd432277184b904ec73a48744ba9f974b92c711385bc91ced30"} Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.794128 4906 generic.go:334] "Generic (PLEG): container finished" podID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerID="9a4f29e9aa84c640f30a37c838e0c6ca923ce31cd3b151a0d2df3f8c4cca310f" exitCode=0 Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.794233 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" event={"ID":"bcec6a95-1051-45a5-9dc0-156dff89a709","Type":"ContainerDied","Data":"9a4f29e9aa84c640f30a37c838e0c6ca923ce31cd3b151a0d2df3f8c4cca310f"} Feb 27 08:46:02 crc kubenswrapper[4906]: I0227 08:46:02.794303 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" event={"ID":"bcec6a95-1051-45a5-9dc0-156dff89a709","Type":"ContainerStarted","Data":"5caef32c0bbdec34c22223868fdbe3ef51b38c1f78475539926a854018caf775"} Feb 27 08:46:03 crc kubenswrapper[4906]: I0227 08:46:03.813937 4906 generic.go:334] "Generic (PLEG): container finished" podID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerID="9139bfdd07f0699fa0b48702a787b27307ddd81c08f6e97a0d443708791dead3" exitCode=0 Feb 27 08:46:03 crc kubenswrapper[4906]: I0227 08:46:03.814091 4906 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" event={"ID":"bcec6a95-1051-45a5-9dc0-156dff89a709","Type":"ContainerDied","Data":"9139bfdd07f0699fa0b48702a787b27307ddd81c08f6e97a0d443708791dead3"} Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.000861 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.001752 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.058739 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.157813 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536366-bgvjq" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.204894 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx6gc\" (UniqueName: \"kubernetes.io/projected/f4c4a66a-849a-4920-b332-4b6dacbec98b-kube-api-access-jx6gc\") pod \"f4c4a66a-849a-4920-b332-4b6dacbec98b\" (UID: \"f4c4a66a-849a-4920-b332-4b6dacbec98b\") " Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.212016 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f4c4a66a-849a-4920-b332-4b6dacbec98b-kube-api-access-jx6gc" (OuterVolumeSpecName: "kube-api-access-jx6gc") pod "f4c4a66a-849a-4920-b332-4b6dacbec98b" (UID: "f4c4a66a-849a-4920-b332-4b6dacbec98b"). InnerVolumeSpecName "kube-api-access-jx6gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.306830 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx6gc\" (UniqueName: \"kubernetes.io/projected/f4c4a66a-849a-4920-b332-4b6dacbec98b-kube-api-access-jx6gc\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.826840 4906 generic.go:334] "Generic (PLEG): container finished" podID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerID="7eae59e7a51d9c7b999a79d5b80242e1ddbc9aee0146bd1f4995d865adf43e30" exitCode=0 Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.826935 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" event={"ID":"bcec6a95-1051-45a5-9dc0-156dff89a709","Type":"ContainerDied","Data":"7eae59e7a51d9c7b999a79d5b80242e1ddbc9aee0146bd1f4995d865adf43e30"} Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.830386 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536366-bgvjq" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.830858 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536366-bgvjq" event={"ID":"f4c4a66a-849a-4920-b332-4b6dacbec98b","Type":"ContainerDied","Data":"1c3ddcce286cf2bf2c58ee0c0f581c9e08cc0153bc7cfcf06a953c0b6dcfb080"} Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.830931 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1c3ddcce286cf2bf2c58ee0c0f581c9e08cc0153bc7cfcf06a953c0b6dcfb080" Feb 27 08:46:04 crc kubenswrapper[4906]: I0227 08:46:04.883837 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:46:05 crc kubenswrapper[4906]: I0227 08:46:05.233176 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536360-pksm2"] Feb 27 08:46:05 crc kubenswrapper[4906]: I0227 08:46:05.236405 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536360-pksm2"] Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.123467 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.244960 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-bundle\") pod \"bcec6a95-1051-45a5-9dc0-156dff89a709\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.245267 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jq6pt\" (UniqueName: \"kubernetes.io/projected/bcec6a95-1051-45a5-9dc0-156dff89a709-kube-api-access-jq6pt\") pod \"bcec6a95-1051-45a5-9dc0-156dff89a709\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.245328 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-util\") pod \"bcec6a95-1051-45a5-9dc0-156dff89a709\" (UID: \"bcec6a95-1051-45a5-9dc0-156dff89a709\") " Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.246840 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-bundle" (OuterVolumeSpecName: "bundle") pod "bcec6a95-1051-45a5-9dc0-156dff89a709" (UID: "bcec6a95-1051-45a5-9dc0-156dff89a709"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.255631 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcec6a95-1051-45a5-9dc0-156dff89a709-kube-api-access-jq6pt" (OuterVolumeSpecName: "kube-api-access-jq6pt") pod "bcec6a95-1051-45a5-9dc0-156dff89a709" (UID: "bcec6a95-1051-45a5-9dc0-156dff89a709"). InnerVolumeSpecName "kube-api-access-jq6pt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.276774 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-util" (OuterVolumeSpecName: "util") pod "bcec6a95-1051-45a5-9dc0-156dff89a709" (UID: "bcec6a95-1051-45a5-9dc0-156dff89a709"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.348020 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jq6pt\" (UniqueName: \"kubernetes.io/projected/bcec6a95-1051-45a5-9dc0-156dff89a709-kube-api-access-jq6pt\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.348092 4906 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-util\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.348124 4906 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/bcec6a95-1051-45a5-9dc0-156dff89a709-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.563190 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdb5a434-dd10-4c94-836b-3333e84775d7" path="/var/lib/kubelet/pods/fdb5a434-dd10-4c94-836b-3333e84775d7/volumes" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.845420 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.845430 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n" event={"ID":"bcec6a95-1051-45a5-9dc0-156dff89a709","Type":"ContainerDied","Data":"5caef32c0bbdec34c22223868fdbe3ef51b38c1f78475539926a854018caf775"} Feb 27 08:46:06 crc kubenswrapper[4906]: I0227 08:46:06.845908 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5caef32c0bbdec34c22223868fdbe3ef51b38c1f78475539926a854018caf775" Feb 27 08:46:07 crc kubenswrapper[4906]: I0227 08:46:07.460582 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vswv9"] Feb 27 08:46:07 crc kubenswrapper[4906]: I0227 08:46:07.851797 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vswv9" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="registry-server" containerID="cri-o://ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3" gracePeriod=2 Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.317258 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.382112 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-utilities\") pod \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.382176 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbdnp\" (UniqueName: \"kubernetes.io/projected/ab6f1c2b-b0be-4954-bee9-3a24d794f919-kube-api-access-vbdnp\") pod \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.382214 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-catalog-content\") pod \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\" (UID: \"ab6f1c2b-b0be-4954-bee9-3a24d794f919\") " Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.384172 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-utilities" (OuterVolumeSpecName: "utilities") pod "ab6f1c2b-b0be-4954-bee9-3a24d794f919" (UID: "ab6f1c2b-b0be-4954-bee9-3a24d794f919"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.388641 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab6f1c2b-b0be-4954-bee9-3a24d794f919-kube-api-access-vbdnp" (OuterVolumeSpecName: "kube-api-access-vbdnp") pod "ab6f1c2b-b0be-4954-bee9-3a24d794f919" (UID: "ab6f1c2b-b0be-4954-bee9-3a24d794f919"). InnerVolumeSpecName "kube-api-access-vbdnp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.444378 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab6f1c2b-b0be-4954-bee9-3a24d794f919" (UID: "ab6f1c2b-b0be-4954-bee9-3a24d794f919"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.484228 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.484276 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbdnp\" (UniqueName: \"kubernetes.io/projected/ab6f1c2b-b0be-4954-bee9-3a24d794f919-kube-api-access-vbdnp\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.484290 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab6f1c2b-b0be-4954-bee9-3a24d794f919-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.862346 4906 generic.go:334] "Generic (PLEG): container finished" podID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerID="ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3" exitCode=0 Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.862455 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vswv9" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.862960 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vswv9" event={"ID":"ab6f1c2b-b0be-4954-bee9-3a24d794f919","Type":"ContainerDied","Data":"ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3"} Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.863454 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vswv9" event={"ID":"ab6f1c2b-b0be-4954-bee9-3a24d794f919","Type":"ContainerDied","Data":"225d8ef0cfa852fe673ee94e38d1654de2e8cb61142594acc25b7f90edb0c8e9"} Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.863541 4906 scope.go:117] "RemoveContainer" containerID="ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.894216 4906 scope.go:117] "RemoveContainer" containerID="0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.895862 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vswv9"] Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.902431 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vswv9"] Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.928565 4906 scope.go:117] "RemoveContainer" containerID="37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.948936 4906 scope.go:117] "RemoveContainer" containerID="ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3" Feb 27 08:46:08 crc kubenswrapper[4906]: E0227 08:46:08.949641 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3\": container with ID starting with ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3 not found: ID does not exist" containerID="ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.949763 
4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3"} err="failed to get container status \"ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3\": rpc error: code = NotFound desc = could not find container \"ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3\": container with ID starting with ab733f36b2a39162c2378286fac82c3a683b687e7e72f39c5cd8e48e68204cb3 not found: ID does not exist" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.949835 4906 scope.go:117] "RemoveContainer" containerID="0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534" Feb 27 08:46:08 crc kubenswrapper[4906]: E0227 08:46:08.950774 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534\": container with ID starting with 0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534 not found: ID does not exist" containerID="0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.950854 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534"} err="failed to get container status \"0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534\": rpc error: code = NotFound desc = could not find container \"0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534\": container with ID starting with 0784bce7fec31833d89dc519629304a495f503eb0025ba88e1d6a64e5fbe7534 not found: ID does not exist" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.950921 4906 scope.go:117] "RemoveContainer" containerID="37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d" Feb 27 08:46:08 crc kubenswrapper[4906]: E0227 08:46:08.951676 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d\": container with ID starting with 37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d not found: ID does not exist" containerID="37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d" Feb 27 08:46:08 crc kubenswrapper[4906]: I0227 08:46:08.951726 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d"} err="failed to get container status \"37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d\": rpc error: code = NotFound desc = could not find container \"37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d\": container with ID starting with 37739bc3775dbf1bde9be7ec5960c6bab5d8d6208d3658c711e7d6024ecaf26d not found: ID does not exist" Feb 27 08:46:10 crc kubenswrapper[4906]: I0227 08:46:10.563014 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" path="/var/lib/kubelet/pods/ab6f1c2b-b0be-4954-bee9-3a24d794f919/volumes" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.785014 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb"] Feb 27 08:46:11 crc kubenswrapper[4906]: E0227 08:46:11.785754 4906 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="extract-utilities" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.785776 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="extract-utilities" Feb 27 08:46:11 crc kubenswrapper[4906]: E0227 08:46:11.785804 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="extract-content" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.785817 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="extract-content" Feb 27 08:46:11 crc kubenswrapper[4906]: E0227 08:46:11.785841 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4c4a66a-849a-4920-b332-4b6dacbec98b" containerName="oc" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.785855 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4c4a66a-849a-4920-b332-4b6dacbec98b" containerName="oc" Feb 27 08:46:11 crc kubenswrapper[4906]: E0227 08:46:11.785869 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerName="extract" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.785911 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerName="extract" Feb 27 08:46:11 crc kubenswrapper[4906]: E0227 08:46:11.785931 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="registry-server" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.785943 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="registry-server" Feb 27 08:46:11 crc kubenswrapper[4906]: E0227 08:46:11.785961 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerName="pull" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.785973 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerName="pull" Feb 27 08:46:11 crc kubenswrapper[4906]: E0227 08:46:11.785997 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerName="util" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.786009 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerName="util" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.786194 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcec6a95-1051-45a5-9dc0-156dff89a709" containerName="extract" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.786212 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab6f1c2b-b0be-4954-bee9-3a24d794f919" containerName="registry-server" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.786240 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4c4a66a-849a-4920-b332-4b6dacbec98b" containerName="oc" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.787215 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.790023 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-9sm9v" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.815093 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb"] Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.836575 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8w78\" (UniqueName: \"kubernetes.io/projected/05a6722c-3dc2-412e-aa57-9f5201e6987e-kube-api-access-j8w78\") pod \"openstack-operator-controller-init-5fb5699c68-bb7nb\" (UID: \"05a6722c-3dc2-412e-aa57-9f5201e6987e\") " pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.938440 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8w78\" (UniqueName: \"kubernetes.io/projected/05a6722c-3dc2-412e-aa57-9f5201e6987e-kube-api-access-j8w78\") pod \"openstack-operator-controller-init-5fb5699c68-bb7nb\" (UID: \"05a6722c-3dc2-412e-aa57-9f5201e6987e\") " pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" Feb 27 08:46:11 crc kubenswrapper[4906]: I0227 08:46:11.963700 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8w78\" (UniqueName: \"kubernetes.io/projected/05a6722c-3dc2-412e-aa57-9f5201e6987e-kube-api-access-j8w78\") pod \"openstack-operator-controller-init-5fb5699c68-bb7nb\" (UID: \"05a6722c-3dc2-412e-aa57-9f5201e6987e\") " pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" Feb 27 08:46:12 crc kubenswrapper[4906]: I0227 08:46:12.148062 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" Feb 27 08:46:12 crc kubenswrapper[4906]: I0227 08:46:12.621130 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb"] Feb 27 08:46:12 crc kubenswrapper[4906]: I0227 08:46:12.900955 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" event={"ID":"05a6722c-3dc2-412e-aa57-9f5201e6987e","Type":"ContainerStarted","Data":"bf52727235f6f79385179893419665a1f05a5168f51f518405095b26e9901192"} Feb 27 08:46:16 crc kubenswrapper[4906]: I0227 08:46:16.933289 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" event={"ID":"05a6722c-3dc2-412e-aa57-9f5201e6987e","Type":"ContainerStarted","Data":"06b4150678d7745afe8dd7bb79d49579313066c3892f0cfd6d5db4463e6b6799"} Feb 27 08:46:16 crc kubenswrapper[4906]: I0227 08:46:16.933918 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" Feb 27 08:46:16 crc kubenswrapper[4906]: I0227 08:46:16.972679 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" podStartSLOduration=2.025270868 podStartE2EDuration="5.972651024s" podCreationTimestamp="2026-02-27 08:46:11 +0000 UTC" firstStartedPulling="2026-02-27 08:46:12.625056005 +0000 UTC m=+1071.019457615" lastFinishedPulling="2026-02-27 08:46:16.572436161 +0000 UTC m=+1074.966837771" observedRunningTime="2026-02-27 08:46:16.962059036 +0000 UTC m=+1075.356460646" watchObservedRunningTime="2026-02-27 08:46:16.972651024 +0000 UTC m=+1075.367052634" Feb 27 08:46:22 crc kubenswrapper[4906]: I0227 08:46:22.151478 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-5fb5699c68-bb7nb" Feb 27 08:46:24 crc kubenswrapper[4906]: I0227 08:46:24.844311 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:46:24 crc kubenswrapper[4906]: I0227 08:46:24.846558 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:46:28 crc kubenswrapper[4906]: I0227 08:46:28.938419 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xpzfp"] Feb 27 08:46:28 crc kubenswrapper[4906]: I0227 08:46:28.940272 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:28 crc kubenswrapper[4906]: I0227 08:46:28.977274 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xpzfp"] Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.019248 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-catalog-content\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.019679 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w5sg9\" (UniqueName: \"kubernetes.io/projected/38b40d50-8598-4279-a798-c499b5152457-kube-api-access-w5sg9\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.019744 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-utilities\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.120744 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w5sg9\" (UniqueName: \"kubernetes.io/projected/38b40d50-8598-4279-a798-c499b5152457-kube-api-access-w5sg9\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.120815 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-utilities\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.120859 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-catalog-content\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.121422 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-catalog-content\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.121525 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-utilities\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.144842 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w5sg9\" (UniqueName: \"kubernetes.io/projected/38b40d50-8598-4279-a798-c499b5152457-kube-api-access-w5sg9\") pod \"certified-operators-xpzfp\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.258993 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:29 crc kubenswrapper[4906]: I0227 08:46:29.651687 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xpzfp"] Feb 27 08:46:30 crc kubenswrapper[4906]: I0227 08:46:30.048366 4906 generic.go:334] "Generic (PLEG): container finished" podID="38b40d50-8598-4279-a798-c499b5152457" containerID="ad1c818a9cf132f2f7d2fd162f8a65bf1fdc14c3e23220df274319f765d65fb8" exitCode=0 Feb 27 08:46:30 crc kubenswrapper[4906]: I0227 08:46:30.048492 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpzfp" event={"ID":"38b40d50-8598-4279-a798-c499b5152457","Type":"ContainerDied","Data":"ad1c818a9cf132f2f7d2fd162f8a65bf1fdc14c3e23220df274319f765d65fb8"} Feb 27 08:46:30 crc kubenswrapper[4906]: I0227 08:46:30.050640 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpzfp" event={"ID":"38b40d50-8598-4279-a798-c499b5152457","Type":"ContainerStarted","Data":"a482787af5fd7ea233e747c21c36e8c810833b22ab072c02a2d9172f1efd6d55"} Feb 27 08:46:32 crc kubenswrapper[4906]: I0227 08:46:32.068535 4906 generic.go:334] "Generic (PLEG): container finished" podID="38b40d50-8598-4279-a798-c499b5152457" containerID="6dcc794a6141c862bd8de6b7a60d11f055d475e4049ce603359ff9dace4bcb6a" exitCode=0 Feb 27 08:46:32 crc kubenswrapper[4906]: I0227 08:46:32.068637 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpzfp" event={"ID":"38b40d50-8598-4279-a798-c499b5152457","Type":"ContainerDied","Data":"6dcc794a6141c862bd8de6b7a60d11f055d475e4049ce603359ff9dace4bcb6a"} Feb 27 08:46:34 crc kubenswrapper[4906]: I0227 08:46:34.087856 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpzfp" event={"ID":"38b40d50-8598-4279-a798-c499b5152457","Type":"ContainerStarted","Data":"29901429b7a8eff95f56596947e1db91c65bfc6117402c9737a8c6a5c09f5c28"} Feb 27 08:46:34 crc kubenswrapper[4906]: I0227 08:46:34.128298 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xpzfp" podStartSLOduration=3.060012769 podStartE2EDuration="6.128274639s" podCreationTimestamp="2026-02-27 08:46:28 +0000 UTC" firstStartedPulling="2026-02-27 08:46:30.05034101 +0000 UTC m=+1088.444742610" lastFinishedPulling="2026-02-27 08:46:33.11860287 +0000 UTC m=+1091.513004480" observedRunningTime="2026-02-27 08:46:34.124019157 +0000 UTC m=+1092.518420767" watchObservedRunningTime="2026-02-27 08:46:34.128274639 +0000 UTC m=+1092.522676249" Feb 27 08:46:39 crc kubenswrapper[4906]: I0227 08:46:39.259947 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:39 crc kubenswrapper[4906]: I0227 08:46:39.261022 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:39 crc kubenswrapper[4906]: I0227 08:46:39.304820 4906 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:40 crc kubenswrapper[4906]: I0227 08:46:40.173219 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:40 crc kubenswrapper[4906]: I0227 08:46:40.226468 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xpzfp"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.499621 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.510399 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.513717 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-ftlc6" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.523145 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.524869 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.537736 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-7j8xq" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.541322 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.542533 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.547270 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-pbrgm" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.568142 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.573870 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.574756 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.578127 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-nm667" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.580202 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.583316 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.585716 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-fpd9s" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.600937 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.603134 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.608026 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.609030 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.613036 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-5h79n" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.625058 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.631621 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd6rt\" (UniqueName: \"kubernetes.io/projected/1bb840aa-a248-4f16-8b8e-2710d728a7f8-kube-api-access-jd6rt\") pod \"designate-operator-controller-manager-5d87c9d997-8q7nm\" (UID: \"1bb840aa-a248-4f16-8b8e-2710d728a7f8\") " pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.631690 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n4qm\" (UniqueName: \"kubernetes.io/projected/aba839b0-d1ee-454e-b138-4e3656ea150d-kube-api-access-6n4qm\") pod \"heat-operator-controller-manager-cf99c678f-jn9zw\" (UID: \"aba839b0-d1ee-454e-b138-4e3656ea150d\") " pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.631779 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lpgql\" (UniqueName: \"kubernetes.io/projected/b54e196b-1f4b-4121-821f-a6751aef49ed-kube-api-access-lpgql\") pod \"barbican-operator-controller-manager-6db6876945-cq6ns\" (UID: \"b54e196b-1f4b-4121-821f-a6751aef49ed\") " pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.631832 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6xnhc\" (UniqueName: \"kubernetes.io/projected/d22e292e-57c3-4fc3-8730-813b100aa442-kube-api-access-6xnhc\") pod \"cinder-operator-controller-manager-55d77d7b5c-2t8hl\" (UID: \"d22e292e-57c3-4fc3-8730-813b100aa442\") " pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.631859 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8w9h\" (UniqueName: \"kubernetes.io/projected/b80a0b4d-87b7-4185-94b6-4524d830f149-kube-api-access-n8w9h\") pod \"glance-operator-controller-manager-64db6967f8-bvglp\" (UID: \"b80a0b4d-87b7-4185-94b6-4524d830f149\") " pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.642186 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.676332 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.677272 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.679749 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.679996 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-cbktj" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.686604 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.715949 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.726233 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.727506 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733004 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd6rt\" (UniqueName: \"kubernetes.io/projected/1bb840aa-a248-4f16-8b8e-2710d728a7f8-kube-api-access-jd6rt\") pod \"designate-operator-controller-manager-5d87c9d997-8q7nm\" (UID: \"1bb840aa-a248-4f16-8b8e-2710d728a7f8\") " pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733061 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n4qm\" (UniqueName: \"kubernetes.io/projected/aba839b0-d1ee-454e-b138-4e3656ea150d-kube-api-access-6n4qm\") pod \"heat-operator-controller-manager-cf99c678f-jn9zw\" (UID: \"aba839b0-d1ee-454e-b138-4e3656ea150d\") " pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733104 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733120 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4hq9\" (UniqueName: \"kubernetes.io/projected/e438f213-61e7-4ce1-9d68-d14e4121ba26-kube-api-access-b4hq9\") pod \"horizon-operator-controller-manager-78bc7f9bd9-pvjd6\" (UID: \"e438f213-61e7-4ce1-9d68-d14e4121ba26\") " pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733141 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lpgql\" (UniqueName: \"kubernetes.io/projected/b54e196b-1f4b-4121-821f-a6751aef49ed-kube-api-access-lpgql\") pod \"barbican-operator-controller-manager-6db6876945-cq6ns\" (UID: \"b54e196b-1f4b-4121-821f-a6751aef49ed\") " pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733182 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zf2f\" (UniqueName: \"kubernetes.io/projected/fa05256a-5601-4ac3-873d-eb58bd232401-kube-api-access-4zf2f\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733202 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6xnhc\" (UniqueName: \"kubernetes.io/projected/d22e292e-57c3-4fc3-8730-813b100aa442-kube-api-access-6xnhc\") pod \"cinder-operator-controller-manager-55d77d7b5c-2t8hl\" (UID: \"d22e292e-57c3-4fc3-8730-813b100aa442\") " pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.733228 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8w9h\" (UniqueName: 
\"kubernetes.io/projected/b80a0b4d-87b7-4185-94b6-4524d830f149-kube-api-access-n8w9h\") pod \"glance-operator-controller-manager-64db6967f8-bvglp\" (UID: \"b80a0b4d-87b7-4185-94b6-4524d830f149\") " pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.737468 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-kqvg5" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.755432 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.756579 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.762164 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-vbtb4" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.763739 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.783117 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lpgql\" (UniqueName: \"kubernetes.io/projected/b54e196b-1f4b-4121-821f-a6751aef49ed-kube-api-access-lpgql\") pod \"barbican-operator-controller-manager-6db6876945-cq6ns\" (UID: \"b54e196b-1f4b-4121-821f-a6751aef49ed\") " pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.784475 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd6rt\" (UniqueName: \"kubernetes.io/projected/1bb840aa-a248-4f16-8b8e-2710d728a7f8-kube-api-access-jd6rt\") pod \"designate-operator-controller-manager-5d87c9d997-8q7nm\" (UID: \"1bb840aa-a248-4f16-8b8e-2710d728a7f8\") " pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.790975 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-67d996989d-wfwll"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.792286 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.795613 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6xnhc\" (UniqueName: \"kubernetes.io/projected/d22e292e-57c3-4fc3-8730-813b100aa442-kube-api-access-6xnhc\") pod \"cinder-operator-controller-manager-55d77d7b5c-2t8hl\" (UID: \"d22e292e-57c3-4fc3-8730-813b100aa442\") " pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.796655 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8w9h\" (UniqueName: \"kubernetes.io/projected/b80a0b4d-87b7-4185-94b6-4524d830f149-kube-api-access-n8w9h\") pod \"glance-operator-controller-manager-64db6967f8-bvglp\" (UID: \"b80a0b4d-87b7-4185-94b6-4524d830f149\") " pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.797779 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-4rsrx" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.801156 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n4qm\" (UniqueName: \"kubernetes.io/projected/aba839b0-d1ee-454e-b138-4e3656ea150d-kube-api-access-6n4qm\") pod \"heat-operator-controller-manager-cf99c678f-jn9zw\" (UID: \"aba839b0-d1ee-454e-b138-4e3656ea150d\") " pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.821130 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.834226 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zf2f\" (UniqueName: \"kubernetes.io/projected/fa05256a-5601-4ac3-873d-eb58bd232401-kube-api-access-4zf2f\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.834297 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hrbp6\" (UniqueName: \"kubernetes.io/projected/6d51bdfc-e48a-44ff-a56c-9400e320fa7f-kube-api-access-hrbp6\") pod \"keystone-operator-controller-manager-55ffd4876b-wtrsc\" (UID: \"6d51bdfc-e48a-44ff-a56c-9400e320fa7f\") " pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.834332 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fps9c\" (UniqueName: \"kubernetes.io/projected/5ff960a3-98d8-4d3f-9116-2a0785aefb2e-kube-api-access-fps9c\") pod \"manila-operator-controller-manager-67d996989d-wfwll\" (UID: \"5ff960a3-98d8-4d3f-9116-2a0785aefb2e\") " pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.834385 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsgtp\" (UniqueName: 
\"kubernetes.io/projected/349bcf77-0fc4-4048-a66d-696798c3a6d4-kube-api-access-fsgtp\") pod \"ironic-operator-controller-manager-545456dc4-5x4jc\" (UID: \"349bcf77-0fc4-4048-a66d-696798c3a6d4\") " pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.834426 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.834446 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4hq9\" (UniqueName: \"kubernetes.io/projected/e438f213-61e7-4ce1-9d68-d14e4121ba26-kube-api-access-b4hq9\") pod \"horizon-operator-controller-manager-78bc7f9bd9-pvjd6\" (UID: \"e438f213-61e7-4ce1-9d68-d14e4121ba26\") " pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" Feb 27 08:46:41 crc kubenswrapper[4906]: E0227 08:46:41.835027 4906 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:41 crc kubenswrapper[4906]: E0227 08:46:41.835076 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert podName:fa05256a-5601-4ac3-873d-eb58bd232401 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:42.335058938 +0000 UTC m=+1100.729460548 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert") pod "infra-operator-controller-manager-f7fcc58b9-xs4j4" (UID: "fa05256a-5601-4ac3-873d-eb58bd232401") : secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.839028 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.852047 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-67d996989d-wfwll"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.862744 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.890337 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4hq9\" (UniqueName: \"kubernetes.io/projected/e438f213-61e7-4ce1-9d68-d14e4121ba26-kube-api-access-b4hq9\") pod \"horizon-operator-controller-manager-78bc7f9bd9-pvjd6\" (UID: \"e438f213-61e7-4ce1-9d68-d14e4121ba26\") " pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.892514 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.903638 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zf2f\" (UniqueName: \"kubernetes.io/projected/fa05256a-5601-4ac3-873d-eb58bd232401-kube-api-access-4zf2f\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.904057 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.920558 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.928141 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.928373 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.933410 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-m9vdd" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.935345 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hrbp6\" (UniqueName: \"kubernetes.io/projected/6d51bdfc-e48a-44ff-a56c-9400e320fa7f-kube-api-access-hrbp6\") pod \"keystone-operator-controller-manager-55ffd4876b-wtrsc\" (UID: \"6d51bdfc-e48a-44ff-a56c-9400e320fa7f\") " pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.935410 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fps9c\" (UniqueName: \"kubernetes.io/projected/5ff960a3-98d8-4d3f-9116-2a0785aefb2e-kube-api-access-fps9c\") pod \"manila-operator-controller-manager-67d996989d-wfwll\" (UID: \"5ff960a3-98d8-4d3f-9116-2a0785aefb2e\") " pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.935454 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsgtp\" (UniqueName: \"kubernetes.io/projected/349bcf77-0fc4-4048-a66d-696798c3a6d4-kube-api-access-fsgtp\") pod \"ironic-operator-controller-manager-545456dc4-5x4jc\" (UID: \"349bcf77-0fc4-4048-a66d-696798c3a6d4\") " pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.940314 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.956366 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv"] Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.958332 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.971940 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsgtp\" (UniqueName: \"kubernetes.io/projected/349bcf77-0fc4-4048-a66d-696798c3a6d4-kube-api-access-fsgtp\") pod \"ironic-operator-controller-manager-545456dc4-5x4jc\" (UID: \"349bcf77-0fc4-4048-a66d-696798c3a6d4\") " pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.973592 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-zjsrk" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.975709 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fps9c\" (UniqueName: \"kubernetes.io/projected/5ff960a3-98d8-4d3f-9116-2a0785aefb2e-kube-api-access-fps9c\") pod \"manila-operator-controller-manager-67d996989d-wfwll\" (UID: \"5ff960a3-98d8-4d3f-9116-2a0785aefb2e\") " pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" Feb 27 08:46:41 crc kubenswrapper[4906]: I0227 08:46:41.996725 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hrbp6\" (UniqueName: \"kubernetes.io/projected/6d51bdfc-e48a-44ff-a56c-9400e320fa7f-kube-api-access-hrbp6\") pod \"keystone-operator-controller-manager-55ffd4876b-wtrsc\" (UID: \"6d51bdfc-e48a-44ff-a56c-9400e320fa7f\") " pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.022925 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.023854 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.029314 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-cwb5m" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.036478 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jwrc\" (UniqueName: \"kubernetes.io/projected/5b1250ff-45df-43fc-a9fc-fa364b823c16-kube-api-access-7jwrc\") pod \"neutron-operator-controller-manager-54688575f-5w6ls\" (UID: \"5b1250ff-45df-43fc-a9fc-fa364b823c16\") " pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.036518 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqsj2\" (UniqueName: \"kubernetes.io/projected/a9932342-3a2d-4621-b4f1-048d92eef4c2-kube-api-access-pqsj2\") pod \"mariadb-operator-controller-manager-556b8b874-fpnbv\" (UID: \"a9932342-3a2d-4621-b4f1-048d92eef4c2\") " pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.040366 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.041289 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.044548 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-x7pm8" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.047529 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.053351 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.062054 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.062323 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.137706 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f9g5j\" (UniqueName: \"kubernetes.io/projected/ec68c3b0-bb17-4c88-a478-e13e49063c7f-kube-api-access-f9g5j\") pod \"nova-operator-controller-manager-74b6b5dc96-lsm2z\" (UID: \"ec68c3b0-bb17-4c88-a478-e13e49063c7f\") " pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.137757 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szf6s\" (UniqueName: \"kubernetes.io/projected/c12e7f2b-60e4-4bb5-9b11-3ae935c649c2-kube-api-access-szf6s\") pod \"octavia-operator-controller-manager-5d86c7ddb7-5h7k9\" (UID: \"c12e7f2b-60e4-4bb5-9b11-3ae935c649c2\") " pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.137824 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.137854 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jwrc\" (UniqueName: \"kubernetes.io/projected/5b1250ff-45df-43fc-a9fc-fa364b823c16-kube-api-access-7jwrc\") pod \"neutron-operator-controller-manager-54688575f-5w6ls\" (UID: \"5b1250ff-45df-43fc-a9fc-fa364b823c16\") " pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.137909 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqsj2\" (UniqueName: \"kubernetes.io/projected/a9932342-3a2d-4621-b4f1-048d92eef4c2-kube-api-access-pqsj2\") pod \"mariadb-operator-controller-manager-556b8b874-fpnbv\" (UID: \"a9932342-3a2d-4621-b4f1-048d92eef4c2\") " pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.163048 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.167038 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jwrc\" (UniqueName: \"kubernetes.io/projected/5b1250ff-45df-43fc-a9fc-fa364b823c16-kube-api-access-7jwrc\") pod \"neutron-operator-controller-manager-54688575f-5w6ls\" (UID: \"5b1250ff-45df-43fc-a9fc-fa364b823c16\") " pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.204340 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.209552 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xpzfp" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="registry-server" containerID="cri-o://29901429b7a8eff95f56596947e1db91c65bfc6117402c9737a8c6a5c09f5c28" gracePeriod=2 Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.210369 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqsj2\" (UniqueName: \"kubernetes.io/projected/a9932342-3a2d-4621-b4f1-048d92eef4c2-kube-api-access-pqsj2\") pod \"mariadb-operator-controller-manager-556b8b874-fpnbv\" (UID: \"a9932342-3a2d-4621-b4f1-048d92eef4c2\") " pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.210792 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.210928 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.215940 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-8h2bk" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.234024 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.247325 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.248349 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.249042 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r2vfl\" (UniqueName: \"kubernetes.io/projected/ab4168c1-442a-4218-bd19-a0194e2b4e59-kube-api-access-r2vfl\") pod \"ovn-operator-controller-manager-75684d597f-q59sz\" (UID: \"ab4168c1-442a-4218-bd19-a0194e2b4e59\") " pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.249087 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f9g5j\" (UniqueName: \"kubernetes.io/projected/ec68c3b0-bb17-4c88-a478-e13e49063c7f-kube-api-access-f9g5j\") pod \"nova-operator-controller-manager-74b6b5dc96-lsm2z\" (UID: \"ec68c3b0-bb17-4c88-a478-e13e49063c7f\") " pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.250867 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.253533 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-vlhql" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.261024 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szf6s\" (UniqueName: \"kubernetes.io/projected/c12e7f2b-60e4-4bb5-9b11-3ae935c649c2-kube-api-access-szf6s\") pod \"octavia-operator-controller-manager-5d86c7ddb7-5h7k9\" (UID: \"c12e7f2b-60e4-4bb5-9b11-3ae935c649c2\") " pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.279468 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.301754 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.308237 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szf6s\" (UniqueName: \"kubernetes.io/projected/c12e7f2b-60e4-4bb5-9b11-3ae935c649c2-kube-api-access-szf6s\") pod \"octavia-operator-controller-manager-5d86c7ddb7-5h7k9\" (UID: \"c12e7f2b-60e4-4bb5-9b11-3ae935c649c2\") " pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.310051 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.313573 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.315479 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f9g5j\" (UniqueName: \"kubernetes.io/projected/ec68c3b0-bb17-4c88-a478-e13e49063c7f-kube-api-access-f9g5j\") pod \"nova-operator-controller-manager-74b6b5dc96-lsm2z\" (UID: \"ec68c3b0-bb17-4c88-a478-e13e49063c7f\") " pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.317610 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.317635 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-drtf9" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.325898 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.340499 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.342021 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.342715 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.346828 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-7mqd5" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.347683 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.348639 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.350668 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-62hj4" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.359292 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.363635 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.364190 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r2vfl\" (UniqueName: \"kubernetes.io/projected/ab4168c1-442a-4218-bd19-a0194e2b4e59-kube-api-access-r2vfl\") pod \"ovn-operator-controller-manager-75684d597f-q59sz\" (UID: \"ab4168c1-442a-4218-bd19-a0194e2b4e59\") " pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.364653 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.364803 4906 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.364893 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert podName:fa05256a-5601-4ac3-873d-eb58bd232401 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:43.364853819 +0000 UTC m=+1101.759255429 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert") pod "infra-operator-controller-manager-f7fcc58b9-xs4j4" (UID: "fa05256a-5601-4ac3-873d-eb58bd232401") : secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.365588 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clfnv\" (UniqueName: \"kubernetes.io/projected/39722076-5ed2-4e53-bb1d-d2a8bc73b825-kube-api-access-clfnv\") pod \"placement-operator-controller-manager-648564c9fc-p8w7q\" (UID: \"39722076-5ed2-4e53-bb1d-d2a8bc73b825\") " pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.365737 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.365846 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhv8k\" (UniqueName: \"kubernetes.io/projected/775eec2f-3f17-4413-b454-0248b5cb7817-kube-api-access-vhv8k\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.366502 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2mj9c"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.368677 4906 util.go:30] "No sandbox for 
pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.386700 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mj9c"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.392003 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.393139 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r2vfl\" (UniqueName: \"kubernetes.io/projected/ab4168c1-442a-4218-bd19-a0194e2b4e59-kube-api-access-r2vfl\") pod \"ovn-operator-controller-manager-75684d597f-q59sz\" (UID: \"ab4168c1-442a-4218-bd19-a0194e2b4e59\") " pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.400858 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.402561 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.411227 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.414720 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-lzxmv" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.433190 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.437242 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.439508 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-nqhb7" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.438713 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.451711 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.454399 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.455543 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.457132 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.458748 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-25pgh" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.459350 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466508 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466551 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmg6r\" (UniqueName: \"kubernetes.io/projected/4b06e9fa-a6b4-4277-a4ea-d0724bc40002-kube-api-access-rmg6r\") pod \"test-operator-controller-manager-55b5ff4dbb-s9mn2\" (UID: \"4b06e9fa-a6b4-4277-a4ea-d0724bc40002\") " pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466576 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhv8k\" (UniqueName: \"kubernetes.io/projected/775eec2f-3f17-4413-b454-0248b5cb7817-kube-api-access-vhv8k\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466596 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvf6b\" (UniqueName: \"kubernetes.io/projected/52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06-kube-api-access-dvf6b\") pod \"telemetry-operator-controller-manager-5fdb694969-28cxl\" (UID: \"52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06\") " pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466622 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wntj7\" (UniqueName: \"kubernetes.io/projected/470d801d-7d83-4b70-ba47-f2d93ef9ebfc-kube-api-access-wntj7\") pod \"swift-operator-controller-manager-9b9ff9f4d-sdhtz\" (UID: \"470d801d-7d83-4b70-ba47-f2d93ef9ebfc\") " pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466639 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5pvr\" (UniqueName: \"kubernetes.io/projected/744c08ea-1798-4480-9a26-d5ec4c3843e2-kube-api-access-j5pvr\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466740 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-utilities\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466760 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clfnv\" (UniqueName: \"kubernetes.io/projected/39722076-5ed2-4e53-bb1d-d2a8bc73b825-kube-api-access-clfnv\") pod \"placement-operator-controller-manager-648564c9fc-p8w7q\" (UID: \"39722076-5ed2-4e53-bb1d-d2a8bc73b825\") " pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.466781 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-catalog-content\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.466916 4906 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.466957 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert podName:775eec2f-3f17-4413-b454-0248b5cb7817 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:42.966943153 +0000 UTC m=+1101.361344763 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" (UID: "775eec2f-3f17-4413-b454-0248b5cb7817") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.495399 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.499948 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx"] Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.501669 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.512437 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-6hw97" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.550301 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clfnv\" (UniqueName: \"kubernetes.io/projected/39722076-5ed2-4e53-bb1d-d2a8bc73b825-kube-api-access-clfnv\") pod \"placement-operator-controller-manager-648564c9fc-p8w7q\" (UID: \"39722076-5ed2-4e53-bb1d-d2a8bc73b825\") " pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.560784 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhv8k\" (UniqueName: \"kubernetes.io/projected/775eec2f-3f17-4413-b454-0248b5cb7817-kube-api-access-vhv8k\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.568534 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.568611 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvf6b\" (UniqueName: \"kubernetes.io/projected/52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06-kube-api-access-dvf6b\") pod \"telemetry-operator-controller-manager-5fdb694969-28cxl\" (UID: \"52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06\") " pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.568805 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wntj7\" (UniqueName: \"kubernetes.io/projected/470d801d-7d83-4b70-ba47-f2d93ef9ebfc-kube-api-access-wntj7\") pod \"swift-operator-controller-manager-9b9ff9f4d-sdhtz\" (UID: \"470d801d-7d83-4b70-ba47-f2d93ef9ebfc\") " pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.568839 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5pvr\" (UniqueName: \"kubernetes.io/projected/744c08ea-1798-4480-9a26-d5ec4c3843e2-kube-api-access-j5pvr\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.568929 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg6md\" (UniqueName: \"kubernetes.io/projected/dfcc34b3-13fc-4a7f-ab38-45744608591e-kube-api-access-cg6md\") pod \"watcher-operator-controller-manager-bccc79885-p8gxp\" (UID: \"dfcc34b3-13fc-4a7f-ab38-45744608591e\") " pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" Feb 27 08:46:42 crc 
kubenswrapper[4906]: I0227 08:46:42.568988 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.569047 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-utilities\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.569445 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-catalog-content\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.569497 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6whf\" (UniqueName: \"kubernetes.io/projected/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-kube-api-access-t6whf\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.570142 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-utilities\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.570230 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9wfl\" (UniqueName: \"kubernetes.io/projected/37c2354e-5123-4644-ac6e-416ab22ecde4-kube-api-access-p9wfl\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pppgx\" (UID: \"37c2354e-5123-4644-ac6e-416ab22ecde4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.570332 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmg6r\" (UniqueName: \"kubernetes.io/projected/4b06e9fa-a6b4-4277-a4ea-d0724bc40002-kube-api-access-rmg6r\") pod \"test-operator-controller-manager-55b5ff4dbb-s9mn2\" (UID: \"4b06e9fa-a6b4-4277-a4ea-d0724bc40002\") " pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.570368 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-catalog-content\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.605546 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-wntj7\" (UniqueName: \"kubernetes.io/projected/470d801d-7d83-4b70-ba47-f2d93ef9ebfc-kube-api-access-wntj7\") pod \"swift-operator-controller-manager-9b9ff9f4d-sdhtz\" (UID: \"470d801d-7d83-4b70-ba47-f2d93ef9ebfc\") " pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.606713 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmg6r\" (UniqueName: \"kubernetes.io/projected/4b06e9fa-a6b4-4277-a4ea-d0724bc40002-kube-api-access-rmg6r\") pod \"test-operator-controller-manager-55b5ff4dbb-s9mn2\" (UID: \"4b06e9fa-a6b4-4277-a4ea-d0724bc40002\") " pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.620423 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvf6b\" (UniqueName: \"kubernetes.io/projected/52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06-kube-api-access-dvf6b\") pod \"telemetry-operator-controller-manager-5fdb694969-28cxl\" (UID: \"52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06\") " pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.634517 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5pvr\" (UniqueName: \"kubernetes.io/projected/744c08ea-1798-4480-9a26-d5ec4c3843e2-kube-api-access-j5pvr\") pod \"redhat-marketplace-2mj9c\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.640355 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.672450 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.672538 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6whf\" (UniqueName: \"kubernetes.io/projected/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-kube-api-access-t6whf\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.672557 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9wfl\" (UniqueName: \"kubernetes.io/projected/37c2354e-5123-4644-ac6e-416ab22ecde4-kube-api-access-p9wfl\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pppgx\" (UID: \"37c2354e-5123-4644-ac6e-416ab22ecde4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.673098 4906 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.673145 4906 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:43.173127955 +0000 UTC m=+1101.567529555 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "webhook-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.673401 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.673475 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg6md\" (UniqueName: \"kubernetes.io/projected/dfcc34b3-13fc-4a7f-ab38-45744608591e-kube-api-access-cg6md\") pod \"watcher-operator-controller-manager-bccc79885-p8gxp\" (UID: \"dfcc34b3-13fc-4a7f-ab38-45744608591e\") " pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.673658 4906 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.673681 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:43.17367434 +0000 UTC m=+1101.568075950 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "metrics-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.689759 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.739146 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg6md\" (UniqueName: \"kubernetes.io/projected/dfcc34b3-13fc-4a7f-ab38-45744608591e-kube-api-access-cg6md\") pod \"watcher-operator-controller-manager-bccc79885-p8gxp\" (UID: \"dfcc34b3-13fc-4a7f-ab38-45744608591e\") " pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.739388 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6whf\" (UniqueName: \"kubernetes.io/projected/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-kube-api-access-t6whf\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.744063 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9wfl\" (UniqueName: \"kubernetes.io/projected/37c2354e-5123-4644-ac6e-416ab22ecde4-kube-api-access-p9wfl\") pod \"rabbitmq-cluster-operator-manager-668c99d594-pppgx\" (UID: \"37c2354e-5123-4644-ac6e-416ab22ecde4\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.755813 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.771634 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.918217 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.946849 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.967590 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" Feb 27 08:46:42 crc kubenswrapper[4906]: I0227 08:46:42.979460 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.979722 4906 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:42 crc kubenswrapper[4906]: E0227 08:46:42.979786 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert podName:775eec2f-3f17-4413-b454-0248b5cb7817 nodeName:}" failed. 
No retries permitted until 2026-02-27 08:46:43.979767568 +0000 UTC m=+1102.374169178 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" (UID: "775eec2f-3f17-4413-b454-0248b5cb7817") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.017458 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.054009 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl"] Feb 27 08:46:43 crc kubenswrapper[4906]: W0227 08:46:43.099324 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd22e292e_57c3_4fc3_8730_813b100aa442.slice/crio-73f9aadb6773d53f314d5c87b6c6d051f778231dfd98f370434f3b47c9431ad6 WatchSource:0}: Error finding container 73f9aadb6773d53f314d5c87b6c6d051f778231dfd98f370434f3b47c9431ad6: Status 404 returned error can't find the container with id 73f9aadb6773d53f314d5c87b6c6d051f778231dfd98f370434f3b47c9431ad6 Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.182913 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.183006 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:43 crc kubenswrapper[4906]: E0227 08:46:43.183315 4906 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 27 08:46:43 crc kubenswrapper[4906]: E0227 08:46:43.183381 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:44.183363192 +0000 UTC m=+1102.577764802 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "webhook-server-cert" not found Feb 27 08:46:43 crc kubenswrapper[4906]: E0227 08:46:43.183719 4906 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 27 08:46:43 crc kubenswrapper[4906]: E0227 08:46:43.183750 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:44.183742142 +0000 UTC m=+1102.578143752 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "metrics-server-cert" not found Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.213205 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" event={"ID":"d22e292e-57c3-4fc3-8730-813b100aa442","Type":"ContainerStarted","Data":"73f9aadb6773d53f314d5c87b6c6d051f778231dfd98f370434f3b47c9431ad6"} Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.397793 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:43 crc kubenswrapper[4906]: E0227 08:46:43.398069 4906 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:43 crc kubenswrapper[4906]: E0227 08:46:43.398129 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert podName:fa05256a-5601-4ac3-873d-eb58bd232401 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:45.398111389 +0000 UTC m=+1103.792512999 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert") pod "infra-operator-controller-manager-f7fcc58b9-xs4j4" (UID: "fa05256a-5601-4ac3-873d-eb58bd232401") : secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.608907 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw"] Feb 27 08:46:43 crc kubenswrapper[4906]: W0227 08:46:43.615591 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod349bcf77_0fc4_4048_a66d_696798c3a6d4.slice/crio-42e0b3aa61be7603eae3015789b10851982111886ec906b994f0d3e553504c91 WatchSource:0}: Error finding container 42e0b3aa61be7603eae3015789b10851982111886ec906b994f0d3e553504c91: Status 404 returned error can't find the container with id 42e0b3aa61be7603eae3015789b10851982111886ec906b994f0d3e553504c91 Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.616257 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc"] Feb 27 08:46:43 crc kubenswrapper[4906]: W0227 08:46:43.621040 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaba839b0_d1ee_454e_b138_4e3656ea150d.slice/crio-5eb9c13c48d522acbe74d7873977bcb5be8fef166584049119901bad7725e2ad WatchSource:0}: Error finding container 5eb9c13c48d522acbe74d7873977bcb5be8fef166584049119901bad7725e2ad: Status 404 returned error can't find the container with id 5eb9c13c48d522acbe74d7873977bcb5be8fef166584049119901bad7725e2ad Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.721818 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm"] Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.744266 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp"] Feb 27 08:46:43 crc kubenswrapper[4906]: W0227 08:46:43.812184 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5ff960a3_98d8_4d3f_9116_2a0785aefb2e.slice/crio-0ff5161a9b063a7d1d5e0ead23dc5e7172d6d5b65b8f897c5a167a19f5ea4dbc WatchSource:0}: Error finding container 0ff5161a9b063a7d1d5e0ead23dc5e7172d6d5b65b8f897c5a167a19f5ea4dbc: Status 404 returned error can't find the container with id 0ff5161a9b063a7d1d5e0ead23dc5e7172d6d5b65b8f897c5a167a19f5ea4dbc Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.823353 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-67d996989d-wfwll"] Feb 27 08:46:43 crc kubenswrapper[4906]: W0227 08:46:43.833567 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode438f213_61e7_4ce1_9d68_d14e4121ba26.slice/crio-195d1f819cee21b5ef0b4057ff8edeb75cb90e2aac7d998206766061b9bc5df9 WatchSource:0}: Error finding container 195d1f819cee21b5ef0b4057ff8edeb75cb90e2aac7d998206766061b9bc5df9: Status 404 returned error can't find the container with id 195d1f819cee21b5ef0b4057ff8edeb75cb90e2aac7d998206766061b9bc5df9 Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.839248 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns"] Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.853921 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6"] Feb 27 08:46:43 crc kubenswrapper[4906]: W0227 08:46:43.857158 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9932342_3a2d_4621_b4f1_048d92eef4c2.slice/crio-8b1e60d449b6859fbab9af12b86d4b686aa08dd34246743ca62cda8a3baa155e WatchSource:0}: Error finding container 8b1e60d449b6859fbab9af12b86d4b686aa08dd34246743ca62cda8a3baa155e: Status 404 returned error can't find the container with id 8b1e60d449b6859fbab9af12b86d4b686aa08dd34246743ca62cda8a3baa155e Feb 27 08:46:43 crc kubenswrapper[4906]: W0227 08:46:43.863012 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc12e7f2b_60e4_4bb5_9b11_3ae935c649c2.slice/crio-f7b4dbc1b6251820e14f21c1cc21afe54a9f4055f2364da43ed98502eef53632 WatchSource:0}: Error finding container f7b4dbc1b6251820e14f21c1cc21afe54a9f4055f2364da43ed98502eef53632: Status 404 returned error can't find the container with id f7b4dbc1b6251820e14f21c1cc21afe54a9f4055f2364da43ed98502eef53632 Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.865001 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv"] Feb 27 08:46:43 crc kubenswrapper[4906]: I0227 08:46:43.873432 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9"] Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.006856 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.007132 4906 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.007286 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert podName:775eec2f-3f17-4413-b454-0248b5cb7817 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:46.007259956 +0000 UTC m=+1104.401661566 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" (UID: "775eec2f-3f17-4413-b454-0248b5cb7817") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.038029 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z"] Feb 27 08:46:44 crc kubenswrapper[4906]: W0227 08:46:44.044996 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d51bdfc_e48a_44ff_a56c_9400e320fa7f.slice/crio-366d315455eb92db908be02f59a62a0e34d03368a0c9f78c0f3ce7a1d9fd48e1 WatchSource:0}: Error finding container 366d315455eb92db908be02f59a62a0e34d03368a0c9f78c0f3ce7a1d9fd48e1: Status 404 returned error can't find the container with id 366d315455eb92db908be02f59a62a0e34d03368a0c9f78c0f3ce7a1d9fd48e1 Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.053848 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q"] Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.060954 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc"] Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.070363 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls"] Feb 27 08:46:44 crc kubenswrapper[4906]: W0227 08:46:44.079015 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec68c3b0_bb17_4c88_a478_e13e49063c7f.slice/crio-39d68d6cef62c13ea7cf73cf87c0fc2a9d9e0d63532a1bedcd6d4f779b1054f9 WatchSource:0}: Error finding container 39d68d6cef62c13ea7cf73cf87c0fc2a9d9e0d63532a1bedcd6d4f779b1054f9: Status 404 returned error can't find the container with id 39d68d6cef62c13ea7cf73cf87c0fc2a9d9e0d63532a1bedcd6d4f779b1054f9 Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.079256 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx"] Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.087405 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2"] Feb 27 08:46:44 crc kubenswrapper[4906]: W0227 08:46:44.088832 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4b06e9fa_a6b4_4277_a4ea_d0724bc40002.slice/crio-f282e9bf1a21cffb6ce1fd7f3e9800597cec1ec16f3689ea889c774f5eeac579 WatchSource:0}: Error finding container f282e9bf1a21cffb6ce1fd7f3e9800597cec1ec16f3689ea889c774f5eeac579: Status 404 returned error can't find the container with id f282e9bf1a21cffb6ce1fd7f3e9800597cec1ec16f3689ea889c774f5eeac579 Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.091154 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:9d03f03aa9a460f1fcac8875064808c03e4ecd0388873bbfb9c7dc58331f3968,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rmg6r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-55b5ff4dbb-s9mn2_openstack-operators(4b06e9fa-a6b4-4277-a4ea-d0724bc40002): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.093681 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" podUID="4b06e9fa-a6b4-4277-a4ea-d0724bc40002" Feb 27 08:46:44 crc kubenswrapper[4906]: W0227 08:46:44.094767 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddfcc34b3_13fc_4a7f_ab38_45744608591e.slice/crio-4005a1c0d435aad4bf55e4b617ed1f0363bedce80d72a3abfe696676ea2e98be WatchSource:0}: Error finding container 4005a1c0d435aad4bf55e4b617ed1f0363bedce80d72a3abfe696676ea2e98be: Status 404 returned error can't find the container with id 4005a1c0d435aad4bf55e4b617ed1f0363bedce80d72a3abfe696676ea2e98be Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.097299 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp"] Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.099267 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:172f24bd4603ac3498536a8a2c8fffb07cf9113dd52bc132778ea0aa275c6b84,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f9g5j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-74b6b5dc96-lsm2z_openstack-operators(ec68c3b0-bb17-4c88-a478-e13e49063c7f): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.100556 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" podUID="ec68c3b0-bb17-4c88-a478-e13e49063c7f" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.101383 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mj9c"] Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.108606 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:06311600a491c689493552e7ff26e36df740fa4e7c143fca874bef19f24afb97,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cg6md,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-bccc79885-p8gxp_openstack-operators(dfcc34b3-13fc-4a7f-ab38-45744608591e): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.110146 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" podUID="dfcc34b3-13fc-4a7f-ab38-45744608591e" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.114923 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz"] Feb 27 08:46:44 crc kubenswrapper[4906]: W0227 08:46:44.119249 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod744c08ea_1798_4480_9a26_d5ec4c3843e2.slice/crio-0c8358291a2bb9a6d3b52e88f732a98f42a4eed6659bff819b96b05d4ad08d2f WatchSource:0}: Error finding container 0c8358291a2bb9a6d3b52e88f732a98f42a4eed6659bff819b96b05d4ad08d2f: Status 404 returned error can't find the container with id 0c8358291a2bb9a6d3b52e88f732a98f42a4eed6659bff819b96b05d4ad08d2f Feb 27 08:46:44 crc kubenswrapper[4906]: W0227 08:46:44.122470 4906 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b1250ff_45df_43fc_a9fc_fa364b823c16.slice/crio-dcf69f6f5d6fd37f699eafc054846188e45cec05f24b155859f3df7f462ac0df WatchSource:0}: Error finding container dcf69f6f5d6fd37f699eafc054846188e45cec05f24b155859f3df7f462ac0df: Status 404 returned error can't find the container with id dcf69f6f5d6fd37f699eafc054846188e45cec05f24b155859f3df7f462ac0df Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.127939 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl"] Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.128630 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:b242403a27609ac87a0ed3a7dd788aceaf8f3da3620981cf5e000d56862d77a4,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-7jwrc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-54688575f-5w6ls_openstack-operators(5b1250ff-45df-43fc-a9fc-fa364b823c16): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.130067 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" podUID="5b1250ff-45df-43fc-a9fc-fa364b823c16" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 
08:46:44.134963 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:f309cdea8084a4b1e8cbcd732d6e250fd93c55cfd1b48ba9026907c8591faab7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wntj7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-9b9ff9f4d-sdhtz_openstack-operators(470d801d-7d83-4b70-ba47-f2d93ef9ebfc): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.136326 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" podUID="470d801d-7d83-4b70-ba47-f2d93ef9ebfc" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.137714 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:9f73c84a9581b5739d8da333c7b64403d7b7ca284b22c624d0effe07f3d2819c,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-r2vfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-75684d597f-q59sz_openstack-operators(ab4168c1-442a-4218-bd19-a0194e2b4e59): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.137820 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz"] Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.138929 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" podUID="ab4168c1-442a-4218-bd19-a0194e2b4e59" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.173681 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:1b9074a4ce16396d8bd2d30a475fc8c2f004f75a023e3eef8950661e89c0bcc6,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} 
BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dvf6b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-5fdb694969-28cxl_openstack-operators(52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.175023 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" podUID="52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.212902 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.213009 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.213168 4906 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.213224 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:46.213206572 +0000 UTC m=+1104.607608182 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "metrics-server-cert" not found Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.213584 4906 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.213607 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:46.213600622 +0000 UTC m=+1104.608002232 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "webhook-server-cert" not found Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.247170 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" event={"ID":"470d801d-7d83-4b70-ba47-f2d93ef9ebfc","Type":"ContainerStarted","Data":"af6e1913c13592225c9b500a6721770ffde9cd205e2696e85025dfae039b1bb2"} Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.251146 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:f309cdea8084a4b1e8cbcd732d6e250fd93c55cfd1b48ba9026907c8591faab7\\\"\"" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" podUID="470d801d-7d83-4b70-ba47-f2d93ef9ebfc" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.258031 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" event={"ID":"aba839b0-d1ee-454e-b138-4e3656ea150d","Type":"ContainerStarted","Data":"5eb9c13c48d522acbe74d7873977bcb5be8fef166584049119901bad7725e2ad"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.260455 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" event={"ID":"b80a0b4d-87b7-4185-94b6-4524d830f149","Type":"ContainerStarted","Data":"2532c0e71b50a0d84da1f169a5d06a5567364689e4aced5394ba39027f9bb830"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.283375 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" event={"ID":"e438f213-61e7-4ce1-9d68-d14e4121ba26","Type":"ContainerStarted","Data":"195d1f819cee21b5ef0b4057ff8edeb75cb90e2aac7d998206766061b9bc5df9"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.284927 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" event={"ID":"52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06","Type":"ContainerStarted","Data":"586d0f71292b717067833494db5fdd539684a2e9c7136cfe8de553e9392c8868"} Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.289627 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: 
\"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:1b9074a4ce16396d8bd2d30a475fc8c2f004f75a023e3eef8950661e89c0bcc6\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" podUID="52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.290447 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" event={"ID":"ab4168c1-442a-4218-bd19-a0194e2b4e59","Type":"ContainerStarted","Data":"d91845a3225b759ff647f309ffdc6e851215103272f394d2fbb4c85757b2280d"} Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.292257 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:9f73c84a9581b5739d8da333c7b64403d7b7ca284b22c624d0effe07f3d2819c\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" podUID="ab4168c1-442a-4218-bd19-a0194e2b4e59" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.292693 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" event={"ID":"ec68c3b0-bb17-4c88-a478-e13e49063c7f","Type":"ContainerStarted","Data":"39d68d6cef62c13ea7cf73cf87c0fc2a9d9e0d63532a1bedcd6d4f779b1054f9"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.293530 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" event={"ID":"5b1250ff-45df-43fc-a9fc-fa364b823c16","Type":"ContainerStarted","Data":"dcf69f6f5d6fd37f699eafc054846188e45cec05f24b155859f3df7f462ac0df"} Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.293973 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:172f24bd4603ac3498536a8a2c8fffb07cf9113dd52bc132778ea0aa275c6b84\\\"\"" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" podUID="ec68c3b0-bb17-4c88-a478-e13e49063c7f" Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.294401 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:b242403a27609ac87a0ed3a7dd788aceaf8f3da3620981cf5e000d56862d77a4\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" podUID="5b1250ff-45df-43fc-a9fc-fa364b823c16" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.294789 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" event={"ID":"5ff960a3-98d8-4d3f-9116-2a0785aefb2e","Type":"ContainerStarted","Data":"0ff5161a9b063a7d1d5e0ead23dc5e7172d6d5b65b8f897c5a167a19f5ea4dbc"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.297480 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" event={"ID":"349bcf77-0fc4-4048-a66d-696798c3a6d4","Type":"ContainerStarted","Data":"42e0b3aa61be7603eae3015789b10851982111886ec906b994f0d3e553504c91"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.308609 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" event={"ID":"a9932342-3a2d-4621-b4f1-048d92eef4c2","Type":"ContainerStarted","Data":"8b1e60d449b6859fbab9af12b86d4b686aa08dd34246743ca62cda8a3baa155e"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.318992 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" event={"ID":"4b06e9fa-a6b4-4277-a4ea-d0724bc40002","Type":"ContainerStarted","Data":"f282e9bf1a21cffb6ce1fd7f3e9800597cec1ec16f3689ea889c774f5eeac579"} Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.324663 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:9d03f03aa9a460f1fcac8875064808c03e4ecd0388873bbfb9c7dc58331f3968\\\"\"" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" podUID="4b06e9fa-a6b4-4277-a4ea-d0724bc40002" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.326200 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" event={"ID":"dfcc34b3-13fc-4a7f-ab38-45744608591e","Type":"ContainerStarted","Data":"4005a1c0d435aad4bf55e4b617ed1f0363bedce80d72a3abfe696676ea2e98be"} Feb 27 08:46:44 crc kubenswrapper[4906]: E0227 08:46:44.335409 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:06311600a491c689493552e7ff26e36df740fa4e7c143fca874bef19f24afb97\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" podUID="dfcc34b3-13fc-4a7f-ab38-45744608591e" Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.335540 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mj9c" event={"ID":"744c08ea-1798-4480-9a26-d5ec4c3843e2","Type":"ContainerStarted","Data":"0c8358291a2bb9a6d3b52e88f732a98f42a4eed6659bff819b96b05d4ad08d2f"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.338623 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" event={"ID":"6d51bdfc-e48a-44ff-a56c-9400e320fa7f","Type":"ContainerStarted","Data":"366d315455eb92db908be02f59a62a0e34d03368a0c9f78c0f3ce7a1d9fd48e1"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.342291 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" event={"ID":"39722076-5ed2-4e53-bb1d-d2a8bc73b825","Type":"ContainerStarted","Data":"ff55cd2e5c03423d846322d4e1a546d5f896cdc00980c621134e871f17f92566"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.343486 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" event={"ID":"1bb840aa-a248-4f16-8b8e-2710d728a7f8","Type":"ContainerStarted","Data":"568b769e701eb8d4dd01a6dfabf0c3765a4b41c3613d1ac408bc5e1c53d2dfcc"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.344919 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" 
event={"ID":"b54e196b-1f4b-4121-821f-a6751aef49ed","Type":"ContainerStarted","Data":"90019dd8cea23779349ec55f463c0fc1488db41ff8e8b6ba66d0b2f1a75b835c"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.352479 4906 generic.go:334] "Generic (PLEG): container finished" podID="38b40d50-8598-4279-a798-c499b5152457" containerID="29901429b7a8eff95f56596947e1db91c65bfc6117402c9737a8c6a5c09f5c28" exitCode=0 Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.352600 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpzfp" event={"ID":"38b40d50-8598-4279-a798-c499b5152457","Type":"ContainerDied","Data":"29901429b7a8eff95f56596947e1db91c65bfc6117402c9737a8c6a5c09f5c28"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.371640 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" event={"ID":"37c2354e-5123-4644-ac6e-416ab22ecde4","Type":"ContainerStarted","Data":"4b9731f638fe460ac25a6dac64ba7645879fa9abc85537900131b98a7b2e5148"} Feb 27 08:46:44 crc kubenswrapper[4906]: I0227 08:46:44.373445 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" event={"ID":"c12e7f2b-60e4-4bb5-9b11-3ae935c649c2","Type":"ContainerStarted","Data":"f7b4dbc1b6251820e14f21c1cc21afe54a9f4055f2364da43ed98502eef53632"} Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.111608 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.135492 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w5sg9\" (UniqueName: \"kubernetes.io/projected/38b40d50-8598-4279-a798-c499b5152457-kube-api-access-w5sg9\") pod \"38b40d50-8598-4279-a798-c499b5152457\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.135587 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-catalog-content\") pod \"38b40d50-8598-4279-a798-c499b5152457\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.135641 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-utilities\") pod \"38b40d50-8598-4279-a798-c499b5152457\" (UID: \"38b40d50-8598-4279-a798-c499b5152457\") " Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.137202 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-utilities" (OuterVolumeSpecName: "utilities") pod "38b40d50-8598-4279-a798-c499b5152457" (UID: "38b40d50-8598-4279-a798-c499b5152457"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.154470 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38b40d50-8598-4279-a798-c499b5152457-kube-api-access-w5sg9" (OuterVolumeSpecName: "kube-api-access-w5sg9") pod "38b40d50-8598-4279-a798-c499b5152457" (UID: "38b40d50-8598-4279-a798-c499b5152457"). InnerVolumeSpecName "kube-api-access-w5sg9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.218239 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "38b40d50-8598-4279-a798-c499b5152457" (UID: "38b40d50-8598-4279-a798-c499b5152457"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.237691 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w5sg9\" (UniqueName: \"kubernetes.io/projected/38b40d50-8598-4279-a798-c499b5152457-kube-api-access-w5sg9\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.237734 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.237764 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/38b40d50-8598-4279-a798-c499b5152457-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.384147 4906 generic.go:334] "Generic (PLEG): container finished" podID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerID="c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b" exitCode=0 Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.384212 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mj9c" event={"ID":"744c08ea-1798-4480-9a26-d5ec4c3843e2","Type":"ContainerDied","Data":"c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b"} Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.403686 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xpzfp" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.403686 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpzfp" event={"ID":"38b40d50-8598-4279-a798-c499b5152457","Type":"ContainerDied","Data":"a482787af5fd7ea233e747c21c36e8c810833b22ab072c02a2d9172f1efd6d55"} Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.403753 4906 scope.go:117] "RemoveContainer" containerID="29901429b7a8eff95f56596947e1db91c65bfc6117402c9737a8c6a5c09f5c28" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.406128 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:9f73c84a9581b5739d8da333c7b64403d7b7ca284b22c624d0effe07f3d2819c\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" podUID="ab4168c1-442a-4218-bd19-a0194e2b4e59" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.406597 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:1b9074a4ce16396d8bd2d30a475fc8c2f004f75a023e3eef8950661e89c0bcc6\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" podUID="52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.406644 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/neutron-operator@sha256:b242403a27609ac87a0ed3a7dd788aceaf8f3da3620981cf5e000d56862d77a4\\\"\"" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" podUID="5b1250ff-45df-43fc-a9fc-fa364b823c16" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.406681 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:f309cdea8084a4b1e8cbcd732d6e250fd93c55cfd1b48ba9026907c8591faab7\\\"\"" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" podUID="470d801d-7d83-4b70-ba47-f2d93ef9ebfc" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.406719 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:9d03f03aa9a460f1fcac8875064808c03e4ecd0388873bbfb9c7dc58331f3968\\\"\"" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" podUID="4b06e9fa-a6b4-4277-a4ea-d0724bc40002" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.407015 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:06311600a491c689493552e7ff26e36df740fa4e7c143fca874bef19f24afb97\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" podUID="dfcc34b3-13fc-4a7f-ab38-45744608591e" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.407185 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:172f24bd4603ac3498536a8a2c8fffb07cf9113dd52bc132778ea0aa275c6b84\\\"\"" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" podUID="ec68c3b0-bb17-4c88-a478-e13e49063c7f" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.441340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.442050 4906 scope.go:117] "RemoveContainer" containerID="6dcc794a6141c862bd8de6b7a60d11f055d475e4049ce603359ff9dace4bcb6a" Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.443338 4906 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:45 crc kubenswrapper[4906]: E0227 08:46:45.443388 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert podName:fa05256a-5601-4ac3-873d-eb58bd232401 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:49.443373708 +0000 UTC m=+1107.837775318 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert") pod "infra-operator-controller-manager-f7fcc58b9-xs4j4" (UID: "fa05256a-5601-4ac3-873d-eb58bd232401") : secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.535100 4906 scope.go:117] "RemoveContainer" containerID="ad1c818a9cf132f2f7d2fd162f8a65bf1fdc14c3e23220df274319f765d65fb8" Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.627995 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xpzfp"] Feb 27 08:46:45 crc kubenswrapper[4906]: I0227 08:46:45.635546 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xpzfp"] Feb 27 08:46:46 crc kubenswrapper[4906]: I0227 08:46:46.057507 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:46 crc kubenswrapper[4906]: E0227 08:46:46.057716 4906 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:46 crc kubenswrapper[4906]: E0227 08:46:46.057799 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert podName:775eec2f-3f17-4413-b454-0248b5cb7817 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:50.057776364 +0000 UTC m=+1108.452177984 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" (UID: "775eec2f-3f17-4413-b454-0248b5cb7817") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:46 crc kubenswrapper[4906]: I0227 08:46:46.272974 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:46 crc kubenswrapper[4906]: I0227 08:46:46.273089 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:46 crc kubenswrapper[4906]: E0227 08:46:46.273253 4906 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 27 08:46:46 crc kubenswrapper[4906]: E0227 08:46:46.273313 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:50.273295231 +0000 UTC m=+1108.667696841 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "metrics-server-cert" not found Feb 27 08:46:46 crc kubenswrapper[4906]: E0227 08:46:46.273557 4906 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 27 08:46:46 crc kubenswrapper[4906]: E0227 08:46:46.273712 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:50.27364397 +0000 UTC m=+1108.668045760 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "webhook-server-cert" not found Feb 27 08:46:46 crc kubenswrapper[4906]: I0227 08:46:46.575402 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38b40d50-8598-4279-a798-c499b5152457" path="/var/lib/kubelet/pods/38b40d50-8598-4279-a798-c499b5152457/volumes" Feb 27 08:46:47 crc kubenswrapper[4906]: I0227 08:46:47.502254 4906 generic.go:334] "Generic (PLEG): container finished" podID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerID="ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6" exitCode=0 Feb 27 08:46:47 crc kubenswrapper[4906]: I0227 08:46:47.502758 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mj9c" event={"ID":"744c08ea-1798-4480-9a26-d5ec4c3843e2","Type":"ContainerDied","Data":"ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6"} Feb 27 08:46:49 crc kubenswrapper[4906]: I0227 08:46:49.529864 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:49 crc kubenswrapper[4906]: E0227 08:46:49.530054 4906 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:49 crc kubenswrapper[4906]: E0227 08:46:49.530541 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert podName:fa05256a-5601-4ac3-873d-eb58bd232401 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:57.530519609 +0000 UTC m=+1115.924921219 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert") pod "infra-operator-controller-manager-f7fcc58b9-xs4j4" (UID: "fa05256a-5601-4ac3-873d-eb58bd232401") : secret "infra-operator-webhook-server-cert" not found Feb 27 08:46:50 crc kubenswrapper[4906]: I0227 08:46:50.149173 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:50 crc kubenswrapper[4906]: E0227 08:46:50.149385 4906 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:50 crc kubenswrapper[4906]: E0227 08:46:50.149643 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert podName:775eec2f-3f17-4413-b454-0248b5cb7817 nodeName:}" failed. No retries permitted until 2026-02-27 08:46:58.149579358 +0000 UTC m=+1116.543980988 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert") pod "openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" (UID: "775eec2f-3f17-4413-b454-0248b5cb7817") : secret "openstack-baremetal-operator-webhook-server-cert" not found Feb 27 08:46:50 crc kubenswrapper[4906]: I0227 08:46:50.354185 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:50 crc kubenswrapper[4906]: I0227 08:46:50.354338 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:50 crc kubenswrapper[4906]: E0227 08:46:50.354424 4906 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 27 08:46:50 crc kubenswrapper[4906]: E0227 08:46:50.354522 4906 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 27 08:46:50 crc kubenswrapper[4906]: E0227 08:46:50.354531 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:58.354503276 +0000 UTC m=+1116.748904886 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "webhook-server-cert" not found Feb 27 08:46:50 crc kubenswrapper[4906]: E0227 08:46:50.354693 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:46:58.35463556 +0000 UTC m=+1116.749037170 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "metrics-server-cert" not found Feb 27 08:46:54 crc kubenswrapper[4906]: I0227 08:46:54.844858 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:46:54 crc kubenswrapper[4906]: I0227 08:46:54.845245 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:46:56 crc kubenswrapper[4906]: I0227 08:46:56.537136 4906 scope.go:117] "RemoveContainer" containerID="dd5f3adb691601d4137551a08699ae2905a09d9154300709a257b60df0bdbea9" Feb 27 08:46:57 crc kubenswrapper[4906]: I0227 08:46:57.573180 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:57 crc kubenswrapper[4906]: I0227 08:46:57.588755 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/fa05256a-5601-4ac3-873d-eb58bd232401-cert\") pod \"infra-operator-controller-manager-f7fcc58b9-xs4j4\" (UID: \"fa05256a-5601-4ac3-873d-eb58bd232401\") " pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:57 crc kubenswrapper[4906]: I0227 08:46:57.605640 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:46:58 crc kubenswrapper[4906]: I0227 08:46:58.185437 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:58 crc kubenswrapper[4906]: I0227 08:46:58.191590 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/775eec2f-3f17-4413-b454-0248b5cb7817-cert\") pod \"openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b\" (UID: \"775eec2f-3f17-4413-b454-0248b5cb7817\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:58 crc kubenswrapper[4906]: I0227 08:46:58.315916 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:46:58 crc kubenswrapper[4906]: I0227 08:46:58.389521 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:58 crc kubenswrapper[4906]: I0227 08:46:58.389641 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:46:58 crc kubenswrapper[4906]: E0227 08:46:58.389817 4906 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Feb 27 08:46:58 crc kubenswrapper[4906]: E0227 08:46:58.389918 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:47:14.389866025 +0000 UTC m=+1132.784267635 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "metrics-server-cert" not found Feb 27 08:46:58 crc kubenswrapper[4906]: E0227 08:46:58.390374 4906 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Feb 27 08:46:58 crc kubenswrapper[4906]: E0227 08:46:58.390418 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs podName:c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c nodeName:}" failed. No retries permitted until 2026-02-27 08:47:14.390394839 +0000 UTC m=+1132.784796449 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs") pod "openstack-operator-controller-manager-6dfddd8f7d-xcp5b" (UID: "c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c") : secret "webhook-server-cert" not found Feb 27 08:47:01 crc kubenswrapper[4906]: E0227 08:47:01.494052 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/glance-operator@sha256:81e43c058d9af1d3bc31704010c630bc2a574c2ee388aa0ffe8c7b9621a7d051" Feb 27 08:47:01 crc kubenswrapper[4906]: E0227 08:47:01.494643 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/glance-operator@sha256:81e43c058d9af1d3bc31704010c630bc2a574c2ee388aa0ffe8c7b9621a7d051,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-n8w9h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-operator-controller-manager-64db6967f8-bvglp_openstack-operators(b80a0b4d-87b7-4185-94b6-4524d830f149): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:01 crc kubenswrapper[4906]: E0227 08:47:01.495865 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" 
podUID="b80a0b4d-87b7-4185-94b6-4524d830f149" Feb 27 08:47:01 crc kubenswrapper[4906]: E0227 08:47:01.619623 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/glance-operator@sha256:81e43c058d9af1d3bc31704010c630bc2a574c2ee388aa0ffe8c7b9621a7d051\\\"\"" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" podUID="b80a0b4d-87b7-4185-94b6-4524d830f149" Feb 27 08:47:02 crc kubenswrapper[4906]: E0227 08:47:02.119286 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:bb939885bd04593ad03af901adb77ee2a2d18529b328c23288c7cc7a2ba5282e" Feb 27 08:47:02 crc kubenswrapper[4906]: E0227 08:47:02.119565 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:bb939885bd04593ad03af901adb77ee2a2d18529b328c23288c7cc7a2ba5282e,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-clfnv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-648564c9fc-p8w7q_openstack-operators(39722076-5ed2-4e53-bb1d-d2a8bc73b825): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:02 crc kubenswrapper[4906]: E0227 08:47:02.120827 4906 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" podUID="39722076-5ed2-4e53-bb1d-d2a8bc73b825" Feb 27 08:47:02 crc kubenswrapper[4906]: E0227 08:47:02.633030 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:bb939885bd04593ad03af901adb77ee2a2d18529b328c23288c7cc7a2ba5282e\\\"\"" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" podUID="39722076-5ed2-4e53-bb1d-d2a8bc73b825" Feb 27 08:47:04 crc kubenswrapper[4906]: E0227 08:47:04.333730 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:3f9b0446a124745439306dc3bb7faec8c02c0b6be33f788b9d455fa57fb60120" Feb 27 08:47:04 crc kubenswrapper[4906]: E0227 08:47:04.333971 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:3f9b0446a124745439306dc3bb7faec8c02c0b6be33f788b9d455fa57fb60120,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-lpgql,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
barbican-operator-controller-manager-6db6876945-cq6ns_openstack-operators(b54e196b-1f4b-4121-821f-a6751aef49ed): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:04 crc kubenswrapper[4906]: E0227 08:47:04.335425 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" podUID="b54e196b-1f4b-4121-821f-a6751aef49ed" Feb 27 08:47:04 crc kubenswrapper[4906]: E0227 08:47:04.647351 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/barbican-operator@sha256:3f9b0446a124745439306dc3bb7faec8c02c0b6be33f788b9d455fa57fb60120\\\"\"" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" podUID="b54e196b-1f4b-4121-821f-a6751aef49ed" Feb 27 08:47:04 crc kubenswrapper[4906]: E0227 08:47:04.978836 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:e41dfadd2c3bbcae29f8c43cd2feea6724a48cdef127d65d1d37816bb9945a01" Feb 27 08:47:04 crc kubenswrapper[4906]: E0227 08:47:04.979617 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:e41dfadd2c3bbcae29f8c43cd2feea6724a48cdef127d65d1d37816bb9945a01,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fsgtp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-545456dc4-5x4jc_openstack-operators(349bcf77-0fc4-4048-a66d-696798c3a6d4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:04 crc kubenswrapper[4906]: E0227 08:47:04.980854 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" podUID="349bcf77-0fc4-4048-a66d-696798c3a6d4" Feb 27 08:47:05 crc kubenswrapper[4906]: E0227 08:47:05.600117 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:ee642fcf655f9897d480460008cba2e98b497d3ffdf7ab1d48ea460eb20c2053" Feb 27 08:47:05 crc kubenswrapper[4906]: E0227 08:47:05.600408 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:ee642fcf655f9897d480460008cba2e98b497d3ffdf7ab1d48ea460eb20c2053,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-6n4qm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-cf99c678f-jn9zw_openstack-operators(aba839b0-d1ee-454e-b138-4e3656ea150d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:05 crc kubenswrapper[4906]: E0227 08:47:05.601706 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" podUID="aba839b0-d1ee-454e-b138-4e3656ea150d" Feb 27 08:47:05 crc kubenswrapper[4906]: E0227 08:47:05.677652 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ironic-operator@sha256:e41dfadd2c3bbcae29f8c43cd2feea6724a48cdef127d65d1d37816bb9945a01\\\"\"" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" podUID="349bcf77-0fc4-4048-a66d-696798c3a6d4" Feb 27 08:47:05 crc kubenswrapper[4906]: E0227 08:47:05.677496 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/heat-operator@sha256:ee642fcf655f9897d480460008cba2e98b497d3ffdf7ab1d48ea460eb20c2053\\\"\"" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" podUID="aba839b0-d1ee-454e-b138-4e3656ea150d" Feb 27 08:47:06 crc kubenswrapper[4906]: E0227 08:47:06.938852 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:2d59045b8d8e6f9c5483c4fdda7c5057218d553200dc4bcf26789980ac1d9abd" Feb 27 08:47:06 crc kubenswrapper[4906]: E0227 08:47:06.939124 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:2d59045b8d8e6f9c5483c4fdda7c5057218d553200dc4bcf26789980ac1d9abd,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: 
{{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-szf6s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-5d86c7ddb7-5h7k9_openstack-operators(c12e7f2b-60e4-4bb5-9b11-3ae935c649c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:06 crc kubenswrapper[4906]: E0227 08:47:06.940301 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" podUID="c12e7f2b-60e4-4bb5-9b11-3ae935c649c2" Feb 27 08:47:07 crc kubenswrapper[4906]: E0227 08:47:07.691445 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:2d59045b8d8e6f9c5483c4fdda7c5057218d553200dc4bcf26789980ac1d9abd\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" podUID="c12e7f2b-60e4-4bb5-9b11-3ae935c649c2" Feb 27 08:47:08 crc kubenswrapper[4906]: E0227 08:47:08.951518 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/designate-operator@sha256:508859beb0e5b69169393dbb0039dc03a9d4ba05f16f6ff74f9b25e19d446214" Feb 27 08:47:08 crc kubenswrapper[4906]: E0227 08:47:08.951768 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/designate-operator@sha256:508859beb0e5b69169393dbb0039dc03a9d4ba05f16f6ff74f9b25e19d446214,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-jd6rt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod designate-operator-controller-manager-5d87c9d997-8q7nm_openstack-operators(1bb840aa-a248-4f16-8b8e-2710d728a7f8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:08 crc kubenswrapper[4906]: E0227 08:47:08.953947 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" podUID="1bb840aa-a248-4f16-8b8e-2710d728a7f8" Feb 27 08:47:09 crc kubenswrapper[4906]: E0227 08:47:09.776714 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/designate-operator@sha256:508859beb0e5b69169393dbb0039dc03a9d4ba05f16f6ff74f9b25e19d446214\\\"\"" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" podUID="1bb840aa-a248-4f16-8b8e-2710d728a7f8" Feb 27 08:47:09 crc kubenswrapper[4906]: E0227 08:47:09.782793 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:71f2ab3bb41d1743287a3270dd49e32192b347d8ba7353d2250cbd7e8528219b" Feb 27 08:47:09 crc kubenswrapper[4906]: E0227 08:47:09.782990 4906 kuberuntime_manager.go:1274] "Unhandled Error" 
err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:71f2ab3bb41d1743287a3270dd49e32192b347d8ba7353d2250cbd7e8528219b,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-pqsj2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-556b8b874-fpnbv_openstack-operators(a9932342-3a2d-4621-b4f1-048d92eef4c2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:09 crc kubenswrapper[4906]: E0227 08:47:09.784132 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" podUID="a9932342-3a2d-4621-b4f1-048d92eef4c2" Feb 27 08:47:10 crc kubenswrapper[4906]: E0227 08:47:10.244787 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Feb 27 08:47:10 crc kubenswrapper[4906]: E0227 08:47:10.245147 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p9wfl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-pppgx_openstack-operators(37c2354e-5123-4644-ac6e-416ab22ecde4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:10 crc kubenswrapper[4906]: E0227 08:47:10.246332 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" podUID="37c2354e-5123-4644-ac6e-416ab22ecde4" Feb 27 08:47:10 crc kubenswrapper[4906]: E0227 08:47:10.727105 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/mariadb-operator@sha256:71f2ab3bb41d1743287a3270dd49e32192b347d8ba7353d2250cbd7e8528219b\\\"\"" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" podUID="a9932342-3a2d-4621-b4f1-048d92eef4c2" Feb 27 08:47:10 crc kubenswrapper[4906]: E0227 08:47:10.727564 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" podUID="37c2354e-5123-4644-ac6e-416ab22ecde4" Feb 27 08:47:13 crc kubenswrapper[4906]: E0227 08:47:13.700832 4906 log.go:32] "PullImage from image service failed" 
err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:12fa31d2a2dfe1a832c6a2c0eb58876a3a62595a1a1f49b13c2a1f9b6d378735" Feb 27 08:47:13 crc kubenswrapper[4906]: E0227 08:47:13.701587 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:12fa31d2a2dfe1a832c6a2c0eb58876a3a62595a1a1f49b13c2a1f9b6d378735,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hrbp6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-55ffd4876b-wtrsc_openstack-operators(6d51bdfc-e48a-44ff-a56c-9400e320fa7f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:47:13 crc kubenswrapper[4906]: E0227 08:47:13.702798 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" podUID="6d51bdfc-e48a-44ff-a56c-9400e320fa7f" Feb 27 08:47:13 crc kubenswrapper[4906]: E0227 08:47:13.778788 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:12fa31d2a2dfe1a832c6a2c0eb58876a3a62595a1a1f49b13c2a1f9b6d378735\\\"\"" 
pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" podUID="6d51bdfc-e48a-44ff-a56c-9400e320fa7f" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.391237 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.391873 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.398425 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-webhook-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.398459 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c-metrics-certs\") pod \"openstack-operator-controller-manager-6dfddd8f7d-xcp5b\" (UID: \"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c\") " pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.501485 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.793424 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" event={"ID":"470d801d-7d83-4b70-ba47-f2d93ef9ebfc","Type":"ContainerStarted","Data":"89085f486de5778989bf5a11977a379ecb10bc91b7c562d7da4d7da87c995852"} Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.795026 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.834426 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4"] Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.835360 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" podStartSLOduration=3.667176969 podStartE2EDuration="33.835332464s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.134704648 +0000 UTC m=+1102.529106258" lastFinishedPulling="2026-02-27 08:47:14.302860143 +0000 UTC m=+1132.697261753" observedRunningTime="2026-02-27 08:47:14.821325296 +0000 UTC m=+1133.215726906" watchObservedRunningTime="2026-02-27 08:47:14.835332464 +0000 UTC m=+1133.229734074" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.841003 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" event={"ID":"d22e292e-57c3-4fc3-8730-813b100aa442","Type":"ContainerStarted","Data":"2bf4c36f7cbbb5a2dde782bb3fec162ad3d516505d1f7130c3cbbee1e2d89426"} Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.841816 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.859795 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" event={"ID":"5ff960a3-98d8-4d3f-9116-2a0785aefb2e","Type":"ContainerStarted","Data":"35b610e9fc44eeafe395d799a075279a0c6a9a83a53e4d5c0ae578acf605cfd4"} Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.861232 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.868304 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b"] Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.873779 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" event={"ID":"e438f213-61e7-4ce1-9d68-d14e4121ba26","Type":"ContainerStarted","Data":"45a91f3b855f98ab2e999f72c47e35694c15e365ddbd011538e3ac02730116d8"} Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.874950 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.881336 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/redhat-marketplace-2mj9c" podStartSLOduration=5.037406238 podStartE2EDuration="33.881303513s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:45.389111101 +0000 UTC m=+1103.783512711" lastFinishedPulling="2026-02-27 08:47:14.233008376 +0000 UTC m=+1132.627409986" observedRunningTime="2026-02-27 08:47:14.861927334 +0000 UTC m=+1133.256328944" watchObservedRunningTime="2026-02-27 08:47:14.881303513 +0000 UTC m=+1133.275705123" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.928754 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" podStartSLOduration=6.455824006 podStartE2EDuration="33.92871973s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.820964858 +0000 UTC m=+1102.215366458" lastFinishedPulling="2026-02-27 08:47:11.293860572 +0000 UTC m=+1129.688262182" observedRunningTime="2026-02-27 08:47:14.913319055 +0000 UTC m=+1133.307720665" watchObservedRunningTime="2026-02-27 08:47:14.92871973 +0000 UTC m=+1133.323121350" Feb 27 08:47:14 crc kubenswrapper[4906]: I0227 08:47:14.969996 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" podStartSLOduration=5.783076916 podStartE2EDuration="33.969968175s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.107889817 +0000 UTC m=+1101.502291427" lastFinishedPulling="2026-02-27 08:47:11.294781076 +0000 UTC m=+1129.689182686" observedRunningTime="2026-02-27 08:47:14.964233514 +0000 UTC m=+1133.358635124" watchObservedRunningTime="2026-02-27 08:47:14.969968175 +0000 UTC m=+1133.364369785" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.016929 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" podStartSLOduration=6.568049477 podStartE2EDuration="34.016871558s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.845098412 +0000 UTC m=+1102.239500022" lastFinishedPulling="2026-02-27 08:47:11.293920493 +0000 UTC m=+1129.688322103" observedRunningTime="2026-02-27 08:47:15.006118575 +0000 UTC m=+1133.400520205" watchObservedRunningTime="2026-02-27 08:47:15.016871558 +0000 UTC m=+1133.411273178" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.352054 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b"] Feb 27 08:47:15 crc kubenswrapper[4906]: W0227 08:47:15.363524 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9a5ef1b_f518_41c0_a30d_d0a5d2e3321c.slice/crio-33b2096c1999336d52c5be13aba966e8df041535afc30cd13f63570fcad19d9e WatchSource:0}: Error finding container 33b2096c1999336d52c5be13aba966e8df041535afc30cd13f63570fcad19d9e: Status 404 returned error can't find the container with id 33b2096c1999336d52c5be13aba966e8df041535afc30cd13f63570fcad19d9e Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.901707 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" 
event={"ID":"775eec2f-3f17-4413-b454-0248b5cb7817","Type":"ContainerStarted","Data":"f31c65312e579a24c9a0af6e1fb554b5a8d06ba4ecf5bb53ccfa7487e09e1272"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.908650 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" event={"ID":"ec68c3b0-bb17-4c88-a478-e13e49063c7f","Type":"ContainerStarted","Data":"beb9878e066fc14ad1048b929d57c8f079d144803b9d92314b1fc8d47f093667"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.908989 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.910658 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" event={"ID":"b80a0b4d-87b7-4185-94b6-4524d830f149","Type":"ContainerStarted","Data":"9edebdff13bb85c2cdbdfd13d1e31a39e83a958aa415ffcf72fbd3e7640e3350"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.910922 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.931582 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" event={"ID":"52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06","Type":"ContainerStarted","Data":"97baa86094cc25905c38100e513038a23dca9e2e7ac2325b41bca5d7297888a5"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.931922 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.940583 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" event={"ID":"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c","Type":"ContainerStarted","Data":"5956252b5eb562a3fe90288972cf4eaf75da50698fcc60fe9d04371ca177633c"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.940649 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" event={"ID":"c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c","Type":"ContainerStarted","Data":"33b2096c1999336d52c5be13aba966e8df041535afc30cd13f63570fcad19d9e"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.941067 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.941676 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" podStartSLOduration=4.635795089 podStartE2EDuration="34.941644105s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.099143343 +0000 UTC m=+1102.493544953" lastFinishedPulling="2026-02-27 08:47:14.404992359 +0000 UTC m=+1132.799393969" observedRunningTime="2026-02-27 08:47:15.940395202 +0000 UTC m=+1134.334796802" watchObservedRunningTime="2026-02-27 08:47:15.941644105 +0000 UTC m=+1134.336045715" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.951828 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" event={"ID":"ab4168c1-442a-4218-bd19-a0194e2b4e59","Type":"ContainerStarted","Data":"0a029a75d580a0e0670f27e1c5773119d63f933fec27da5eb1735cd3c2200bcb"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.952780 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.962036 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" event={"ID":"dfcc34b3-13fc-4a7f-ab38-45744608591e","Type":"ContainerStarted","Data":"403eb3416c29757819dea8559ec3d4fd781fab06adf5ee50399af8e09aa1f8ab"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.962458 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.974195 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" podStartSLOduration=4.319869341 podStartE2EDuration="34.9741668s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.761766941 +0000 UTC m=+1102.156168561" lastFinishedPulling="2026-02-27 08:47:14.4160644 +0000 UTC m=+1132.810466020" observedRunningTime="2026-02-27 08:47:15.973314368 +0000 UTC m=+1134.367715998" watchObservedRunningTime="2026-02-27 08:47:15.9741668 +0000 UTC m=+1134.368568420" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.975733 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mj9c" event={"ID":"744c08ea-1798-4480-9a26-d5ec4c3843e2","Type":"ContainerStarted","Data":"661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.988336 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" event={"ID":"5b1250ff-45df-43fc-a9fc-fa364b823c16","Type":"ContainerStarted","Data":"fe4ad000a691389f271e956d67b42e4e98480971269c7fc3f99d8d7a320d837b"} Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.988731 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" Feb 27 08:47:15 crc kubenswrapper[4906]: I0227 08:47:15.991517 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" event={"ID":"fa05256a-5601-4ac3-873d-eb58bd232401","Type":"ContainerStarted","Data":"ef39a18397b9edde6cb912fee2ba4697cdf0d88b9b42044d6269290784586bd2"} Feb 27 08:47:16 crc kubenswrapper[4906]: I0227 08:47:16.003137 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" event={"ID":"4b06e9fa-a6b4-4277-a4ea-d0724bc40002","Type":"ContainerStarted","Data":"9a6df1f889fa2a7966ced0bd6a08405e7611ea7e3300d38d964cabe063c70ed0"} Feb 27 08:47:16 crc kubenswrapper[4906]: I0227 08:47:16.029796 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" podStartSLOduration=5.465056365 podStartE2EDuration="35.029759562s" podCreationTimestamp="2026-02-27 08:46:41 +0000 
UTC" firstStartedPulling="2026-02-27 08:46:44.173416266 +0000 UTC m=+1102.567817876" lastFinishedPulling="2026-02-27 08:47:13.738119453 +0000 UTC m=+1132.132521073" observedRunningTime="2026-02-27 08:47:16.020683364 +0000 UTC m=+1134.415084974" watchObservedRunningTime="2026-02-27 08:47:16.029759562 +0000 UTC m=+1134.424161172" Feb 27 08:47:16 crc kubenswrapper[4906]: I0227 08:47:16.047111 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" podStartSLOduration=4.797751178 podStartE2EDuration="35.047079608s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.091015299 +0000 UTC m=+1102.485416909" lastFinishedPulling="2026-02-27 08:47:14.340343739 +0000 UTC m=+1132.734745339" observedRunningTime="2026-02-27 08:47:16.044736226 +0000 UTC m=+1134.439137836" watchObservedRunningTime="2026-02-27 08:47:16.047079608 +0000 UTC m=+1134.441481218" Feb 27 08:47:16 crc kubenswrapper[4906]: I0227 08:47:16.083933 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" podStartSLOduration=4.910344489 podStartE2EDuration="35.083905446s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.128466794 +0000 UTC m=+1102.522868404" lastFinishedPulling="2026-02-27 08:47:14.302027751 +0000 UTC m=+1132.696429361" observedRunningTime="2026-02-27 08:47:16.077911908 +0000 UTC m=+1134.472313518" watchObservedRunningTime="2026-02-27 08:47:16.083905446 +0000 UTC m=+1134.478307056" Feb 27 08:47:16 crc kubenswrapper[4906]: I0227 08:47:16.119673 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" podStartSLOduration=3.925464746 podStartE2EDuration="34.119645746s" podCreationTimestamp="2026-02-27 08:46:42 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.108407056 +0000 UTC m=+1102.502808666" lastFinishedPulling="2026-02-27 08:47:14.302588056 +0000 UTC m=+1132.696989666" observedRunningTime="2026-02-27 08:47:16.118501426 +0000 UTC m=+1134.512903036" watchObservedRunningTime="2026-02-27 08:47:16.119645746 +0000 UTC m=+1134.514047376" Feb 27 08:47:16 crc kubenswrapper[4906]: I0227 08:47:16.228536 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" podStartSLOduration=34.228513968 podStartE2EDuration="34.228513968s" podCreationTimestamp="2026-02-27 08:46:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:47:16.206573312 +0000 UTC m=+1134.600974922" watchObservedRunningTime="2026-02-27 08:47:16.228513968 +0000 UTC m=+1134.622915578" Feb 27 08:47:16 crc kubenswrapper[4906]: I0227 08:47:16.228968 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" podStartSLOduration=5.038184229 podStartE2EDuration="35.22896191s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.137543542 +0000 UTC m=+1102.531945152" lastFinishedPulling="2026-02-27 08:47:14.328321223 +0000 UTC m=+1132.722722833" observedRunningTime="2026-02-27 08:47:16.167820853 +0000 UTC m=+1134.562222463" watchObservedRunningTime="2026-02-27 08:47:16.22896191 +0000 UTC 
m=+1134.623363520" Feb 27 08:47:17 crc kubenswrapper[4906]: I0227 08:47:17.021248 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" event={"ID":"b54e196b-1f4b-4121-821f-a6751aef49ed","Type":"ContainerStarted","Data":"cf584ec980d50b1019b3bf729069ef6336bcc5bb146db83ddc1185a08a11b133"} Feb 27 08:47:17 crc kubenswrapper[4906]: I0227 08:47:17.050714 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" podStartSLOduration=3.571703228 podStartE2EDuration="36.050686697s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.833722323 +0000 UTC m=+1102.228123933" lastFinishedPulling="2026-02-27 08:47:16.312705792 +0000 UTC m=+1134.707107402" observedRunningTime="2026-02-27 08:47:17.042597154 +0000 UTC m=+1135.436998764" watchObservedRunningTime="2026-02-27 08:47:17.050686697 +0000 UTC m=+1135.445088307" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.037554 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" event={"ID":"39722076-5ed2-4e53-bb1d-d2a8bc73b825","Type":"ContainerStarted","Data":"9ae0b0f289a16d0e0552b86158945ec969bf111ab8564cbd5b842e752b8eace0"} Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.038382 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.038798 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" event={"ID":"fa05256a-5601-4ac3-873d-eb58bd232401","Type":"ContainerStarted","Data":"3a7d133546c63846a4ff491817907fe544047fd549f562c5fafa4eb3fd828cff"} Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.038999 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.040672 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" event={"ID":"349bcf77-0fc4-4048-a66d-696798c3a6d4","Type":"ContainerStarted","Data":"740225e26366d80f2f355126c4561ae770945b9d19539d85d856ac8e3f8ec949"} Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.041532 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.050498 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" event={"ID":"775eec2f-3f17-4413-b454-0248b5cb7817","Type":"ContainerStarted","Data":"e8bd4b3539e2b974acc654ff9423f03bd18d9a9970cf49c60b9fd72c03830a17"} Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.052151 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.075281 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" podStartSLOduration=3.448964841 
podStartE2EDuration="38.075252443s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.079311821 +0000 UTC m=+1102.473713431" lastFinishedPulling="2026-02-27 08:47:18.705599433 +0000 UTC m=+1137.100001033" observedRunningTime="2026-02-27 08:47:19.070303793 +0000 UTC m=+1137.464705413" watchObservedRunningTime="2026-02-27 08:47:19.075252443 +0000 UTC m=+1137.469654053" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.099262 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" podStartSLOduration=3.015894024 podStartE2EDuration="38.099236764s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.618403771 +0000 UTC m=+1102.012805381" lastFinishedPulling="2026-02-27 08:47:18.701746511 +0000 UTC m=+1137.096148121" observedRunningTime="2026-02-27 08:47:19.089270211 +0000 UTC m=+1137.483671831" watchObservedRunningTime="2026-02-27 08:47:19.099236764 +0000 UTC m=+1137.493638374" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.114004 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" podStartSLOduration=34.33069402 podStartE2EDuration="38.113970791s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:47:14.917341641 +0000 UTC m=+1133.311743251" lastFinishedPulling="2026-02-27 08:47:18.700618412 +0000 UTC m=+1137.095020022" observedRunningTime="2026-02-27 08:47:19.108580079 +0000 UTC m=+1137.502981709" watchObservedRunningTime="2026-02-27 08:47:19.113970791 +0000 UTC m=+1137.508372401" Feb 27 08:47:19 crc kubenswrapper[4906]: I0227 08:47:19.150745 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" podStartSLOduration=34.463343298 podStartE2EDuration="38.150720167s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:47:15.014581268 +0000 UTC m=+1133.408982878" lastFinishedPulling="2026-02-27 08:47:18.701958137 +0000 UTC m=+1137.096359747" observedRunningTime="2026-02-27 08:47:19.14627736 +0000 UTC m=+1137.540678970" watchObservedRunningTime="2026-02-27 08:47:19.150720167 +0000 UTC m=+1137.545121777" Feb 27 08:47:21 crc kubenswrapper[4906]: I0227 08:47:21.840118 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" Feb 27 08:47:21 crc kubenswrapper[4906]: I0227 08:47:21.844101 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6db6876945-cq6ns" Feb 27 08:47:21 crc kubenswrapper[4906]: I0227 08:47:21.867492 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-55d77d7b5c-2t8hl" Feb 27 08:47:21 crc kubenswrapper[4906]: I0227 08:47:21.941166 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-64db6967f8-bvglp" Feb 27 08:47:21 crc kubenswrapper[4906]: I0227 08:47:21.944059 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-78bc7f9bd9-pvjd6" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.081176 4906 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" event={"ID":"c12e7f2b-60e4-4bb5-9b11-3ae935c649c2","Type":"ContainerStarted","Data":"e9c21e05f13ddfb95dd8e0187a58be75940b928facc08fcaad200265f9f2a232"} Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.082022 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.100765 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" podStartSLOduration=3.929639171 podStartE2EDuration="41.100739448s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.864889333 +0000 UTC m=+1102.259290943" lastFinishedPulling="2026-02-27 08:47:21.03598961 +0000 UTC m=+1139.430391220" observedRunningTime="2026-02-27 08:47:22.09967901 +0000 UTC m=+1140.494080620" watchObservedRunningTime="2026-02-27 08:47:22.100739448 +0000 UTC m=+1140.495141058" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.236999 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-67d996989d-wfwll" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.282267 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-54688575f-5w6ls" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.367689 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-74b6b5dc96-lsm2z" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.643851 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-9b9ff9f4d-sdhtz" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.699331 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-75684d597f-q59sz" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.759701 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5fdb694969-28cxl" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.919011 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.919349 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.948228 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.951709 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-55b5ff4dbb-s9mn2" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.982540 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-bccc79885-p8gxp" Feb 27 08:47:22 crc kubenswrapper[4906]: I0227 08:47:22.990204 4906 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.112109 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" event={"ID":"aba839b0-d1ee-454e-b138-4e3656ea150d","Type":"ContainerStarted","Data":"9b2d1c0cd451d72dfb80af0827635402c379bc970ff9583389322d8f04f69c80"} Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.113024 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.123018 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" event={"ID":"1bb840aa-a248-4f16-8b8e-2710d728a7f8","Type":"ContainerStarted","Data":"03e7182062361884b90f1aa50b7fe90562f31cbf165971ac172ca40088b6f59b"} Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.124090 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.146225 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" podStartSLOduration=3.762205548 podStartE2EDuration="42.146198748s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.624091981 +0000 UTC m=+1102.018493591" lastFinishedPulling="2026-02-27 08:47:22.008085181 +0000 UTC m=+1140.402486791" observedRunningTime="2026-02-27 08:47:23.145409827 +0000 UTC m=+1141.539811437" watchObservedRunningTime="2026-02-27 08:47:23.146198748 +0000 UTC m=+1141.540600358" Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.193985 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" podStartSLOduration=3.91060247 podStartE2EDuration="42.193956164s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.724310616 +0000 UTC m=+1102.118712226" lastFinishedPulling="2026-02-27 08:47:22.00766431 +0000 UTC m=+1140.402065920" observedRunningTime="2026-02-27 08:47:23.189016394 +0000 UTC m=+1141.583418004" watchObservedRunningTime="2026-02-27 08:47:23.193956164 +0000 UTC m=+1141.588357774" Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.258847 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:47:23 crc kubenswrapper[4906]: I0227 08:47:23.367513 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mj9c"] Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.133171 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" event={"ID":"37c2354e-5123-4644-ac6e-416ab22ecde4","Type":"ContainerStarted","Data":"40b5019a95e7dfa8668feea14ad47c58209f1f0d35a4555cefbffc8f193e9409"} Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.159629 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-pppgx" podStartSLOduration=3.216591485 podStartE2EDuration="42.159606075s" podCreationTimestamp="2026-02-27 08:46:42 
+0000 UTC" firstStartedPulling="2026-02-27 08:46:44.073148529 +0000 UTC m=+1102.467550139" lastFinishedPulling="2026-02-27 08:47:23.016163119 +0000 UTC m=+1141.410564729" observedRunningTime="2026-02-27 08:47:24.153109284 +0000 UTC m=+1142.547510894" watchObservedRunningTime="2026-02-27 08:47:24.159606075 +0000 UTC m=+1142.554007685" Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.509216 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-6dfddd8f7d-xcp5b" Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.845033 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.845121 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.845175 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.845828 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bb55c20fecc20d21bdfb369cf4ea10a10466e88d9a7657f3958a0393ee619f89"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:47:24 crc kubenswrapper[4906]: I0227 08:47:24.845895 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://bb55c20fecc20d21bdfb369cf4ea10a10466e88d9a7657f3958a0393ee619f89" gracePeriod=600 Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.158103 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="bb55c20fecc20d21bdfb369cf4ea10a10466e88d9a7657f3958a0393ee619f89" exitCode=0 Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.158380 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2mj9c" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="registry-server" containerID="cri-o://661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6" gracePeriod=2 Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.159270 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"bb55c20fecc20d21bdfb369cf4ea10a10466e88d9a7657f3958a0393ee619f89"} Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.159362 4906 scope.go:117] "RemoveContainer" containerID="dd35a2232190406068e2b0898196cd8569373748f1f44babb52802f39e40a3ab" Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.646047 4906 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.690220 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j5pvr\" (UniqueName: \"kubernetes.io/projected/744c08ea-1798-4480-9a26-d5ec4c3843e2-kube-api-access-j5pvr\") pod \"744c08ea-1798-4480-9a26-d5ec4c3843e2\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.690317 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-catalog-content\") pod \"744c08ea-1798-4480-9a26-d5ec4c3843e2\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.690465 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-utilities\") pod \"744c08ea-1798-4480-9a26-d5ec4c3843e2\" (UID: \"744c08ea-1798-4480-9a26-d5ec4c3843e2\") " Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.693365 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-utilities" (OuterVolumeSpecName: "utilities") pod "744c08ea-1798-4480-9a26-d5ec4c3843e2" (UID: "744c08ea-1798-4480-9a26-d5ec4c3843e2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.697327 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/744c08ea-1798-4480-9a26-d5ec4c3843e2-kube-api-access-j5pvr" (OuterVolumeSpecName: "kube-api-access-j5pvr") pod "744c08ea-1798-4480-9a26-d5ec4c3843e2" (UID: "744c08ea-1798-4480-9a26-d5ec4c3843e2"). InnerVolumeSpecName "kube-api-access-j5pvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.720950 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "744c08ea-1798-4480-9a26-d5ec4c3843e2" (UID: "744c08ea-1798-4480-9a26-d5ec4c3843e2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.792119 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.792164 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j5pvr\" (UniqueName: \"kubernetes.io/projected/744c08ea-1798-4480-9a26-d5ec4c3843e2-kube-api-access-j5pvr\") on node \"crc\" DevicePath \"\"" Feb 27 08:47:25 crc kubenswrapper[4906]: I0227 08:47:25.792178 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/744c08ea-1798-4480-9a26-d5ec4c3843e2-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.171004 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"5fcba02c6ff9fc89671410ba4e06ef0b888d1413d1b9d9a87dec063811640cb4"} Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.174476 4906 generic.go:334] "Generic (PLEG): container finished" podID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerID="661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6" exitCode=0 Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.174536 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mj9c" event={"ID":"744c08ea-1798-4480-9a26-d5ec4c3843e2","Type":"ContainerDied","Data":"661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6"} Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.174560 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2mj9c" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.174587 4906 scope.go:117] "RemoveContainer" containerID="661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.174568 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2mj9c" event={"ID":"744c08ea-1798-4480-9a26-d5ec4c3843e2","Type":"ContainerDied","Data":"0c8358291a2bb9a6d3b52e88f732a98f42a4eed6659bff819b96b05d4ad08d2f"} Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.231887 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mj9c"] Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.240563 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2mj9c"] Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.566342 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" path="/var/lib/kubelet/pods/744c08ea-1798-4480-9a26-d5ec4c3843e2/volumes" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.866386 4906 scope.go:117] "RemoveContainer" containerID="ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.895309 4906 scope.go:117] "RemoveContainer" containerID="c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.959727 4906 scope.go:117] "RemoveContainer" containerID="661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6" Feb 27 08:47:26 crc kubenswrapper[4906]: E0227 08:47:26.960489 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6\": container with ID starting with 661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6 not found: ID does not exist" containerID="661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.960538 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6"} err="failed to get container status \"661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6\": rpc error: code = NotFound desc = could not find container \"661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6\": container with ID starting with 661192151a385a2060cc98988401590693aeaf6491cd6e2b8eeaa823341920d6 not found: ID does not exist" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.960569 4906 scope.go:117] "RemoveContainer" containerID="ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6" Feb 27 08:47:26 crc kubenswrapper[4906]: E0227 08:47:26.960852 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6\": container with ID starting with ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6 not found: ID does not exist" containerID="ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.960880 4906 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6"} err="failed to get container status \"ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6\": rpc error: code = NotFound desc = could not find container \"ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6\": container with ID starting with ae4f741721823a75ef4de5459e64f5846d9513f2c4c146cabb9895a90d84e1d6 not found: ID does not exist" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.960913 4906 scope.go:117] "RemoveContainer" containerID="c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b" Feb 27 08:47:26 crc kubenswrapper[4906]: E0227 08:47:26.961418 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b\": container with ID starting with c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b not found: ID does not exist" containerID="c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b" Feb 27 08:47:26 crc kubenswrapper[4906]: I0227 08:47:26.961448 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b"} err="failed to get container status \"c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b\": rpc error: code = NotFound desc = could not find container \"c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b\": container with ID starting with c971c8cab16e35890a503e3d8bff52f5bf33485e8091ce2605329844e3c5634b not found: ID does not exist" Feb 27 08:47:27 crc kubenswrapper[4906]: I0227 08:47:27.188271 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" event={"ID":"a9932342-3a2d-4621-b4f1-048d92eef4c2","Type":"ContainerStarted","Data":"d0f5e97d4b21dc9e70cdd18e78a8d85fa9145ad46ad1064b9a630384a24b13d8"} Feb 27 08:47:27 crc kubenswrapper[4906]: I0227 08:47:27.189030 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" Feb 27 08:47:27 crc kubenswrapper[4906]: I0227 08:47:27.578759 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" podStartSLOduration=3.5705556769999998 podStartE2EDuration="46.57872493s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:43.859699216 +0000 UTC m=+1102.254100826" lastFinishedPulling="2026-02-27 08:47:26.867868469 +0000 UTC m=+1145.262270079" observedRunningTime="2026-02-27 08:47:27.231329716 +0000 UTC m=+1145.625731336" watchObservedRunningTime="2026-02-27 08:47:27.57872493 +0000 UTC m=+1145.973126560" Feb 27 08:47:27 crc kubenswrapper[4906]: I0227 08:47:27.612711 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-f7fcc58b9-xs4j4" Feb 27 08:47:28 crc kubenswrapper[4906]: I0227 08:47:28.199648 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" event={"ID":"6d51bdfc-e48a-44ff-a56c-9400e320fa7f","Type":"ContainerStarted","Data":"c5eb51165fb09adfec28e34b62507574117303c985d35dd29d138eb16ff43de4"} Feb 27 08:47:28 crc kubenswrapper[4906]: I0227 08:47:28.200380 4906 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" Feb 27 08:47:28 crc kubenswrapper[4906]: I0227 08:47:28.224446 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" podStartSLOduration=3.327608341 podStartE2EDuration="47.224418299s" podCreationTimestamp="2026-02-27 08:46:41 +0000 UTC" firstStartedPulling="2026-02-27 08:46:44.056171673 +0000 UTC m=+1102.450573283" lastFinishedPulling="2026-02-27 08:47:27.952981591 +0000 UTC m=+1146.347383241" observedRunningTime="2026-02-27 08:47:28.219341515 +0000 UTC m=+1146.613743125" watchObservedRunningTime="2026-02-27 08:47:28.224418299 +0000 UTC m=+1146.618819929" Feb 27 08:47:28 crc kubenswrapper[4906]: I0227 08:47:28.324050 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b" Feb 27 08:47:31 crc kubenswrapper[4906]: I0227 08:47:31.897603 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-5d87c9d997-8q7nm" Feb 27 08:47:31 crc kubenswrapper[4906]: I0227 08:47:31.909850 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-cf99c678f-jn9zw" Feb 27 08:47:32 crc kubenswrapper[4906]: I0227 08:47:32.066858 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-545456dc4-5x4jc" Feb 27 08:47:32 crc kubenswrapper[4906]: I0227 08:47:32.316618 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-556b8b874-fpnbv" Feb 27 08:47:32 crc kubenswrapper[4906]: I0227 08:47:32.398678 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-5d86c7ddb7-5h7k9" Feb 27 08:47:32 crc kubenswrapper[4906]: I0227 08:47:32.776641 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" Feb 27 08:47:42 crc kubenswrapper[4906]: I0227 08:47:42.174138 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-55ffd4876b-wtrsc" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.577395 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m7cpb"] Feb 27 08:47:59 crc kubenswrapper[4906]: E0227 08:47:59.578561 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="registry-server" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.578582 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="registry-server" Feb 27 08:47:59 crc kubenswrapper[4906]: E0227 08:47:59.578599 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="registry-server" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.578607 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="registry-server" Feb 27 08:47:59 crc kubenswrapper[4906]: E0227 
08:47:59.578627 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="extract-content" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.578636 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="extract-content" Feb 27 08:47:59 crc kubenswrapper[4906]: E0227 08:47:59.578658 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="extract-utilities" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.578666 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="extract-utilities" Feb 27 08:47:59 crc kubenswrapper[4906]: E0227 08:47:59.578681 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="extract-utilities" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.578688 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="extract-utilities" Feb 27 08:47:59 crc kubenswrapper[4906]: E0227 08:47:59.578704 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="extract-content" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.578710 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="extract-content" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.579264 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="744c08ea-1798-4480-9a26-d5ec4c3843e2" containerName="registry-server" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.579300 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="38b40d50-8598-4279-a798-c499b5152457" containerName="registry-server" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.580402 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.590492 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.591196 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.593288 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m7cpb"] Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.594177 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.594989 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-cwhs7" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.676413 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-f92xb"] Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.678388 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.680650 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.697828 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-f92xb"] Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.702661 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c537a85f-4479-417a-bbb0-4a3c603eac2d-config\") pod \"dnsmasq-dns-675f4bcbfc-m7cpb\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.702767 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h27hn\" (UniqueName: \"kubernetes.io/projected/c537a85f-4479-417a-bbb0-4a3c603eac2d-kube-api-access-h27hn\") pod \"dnsmasq-dns-675f4bcbfc-m7cpb\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.804124 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.804571 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c537a85f-4479-417a-bbb0-4a3c603eac2d-config\") pod \"dnsmasq-dns-675f4bcbfc-m7cpb\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.804758 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wxqt\" (UniqueName: \"kubernetes.io/projected/1a561d8e-97e1-4f2f-8143-67411c643b19-kube-api-access-5wxqt\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.804933 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h27hn\" (UniqueName: \"kubernetes.io/projected/c537a85f-4479-417a-bbb0-4a3c603eac2d-kube-api-access-h27hn\") pod \"dnsmasq-dns-675f4bcbfc-m7cpb\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.805046 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-config\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.805828 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c537a85f-4479-417a-bbb0-4a3c603eac2d-config\") pod \"dnsmasq-dns-675f4bcbfc-m7cpb\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 
08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.824366 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h27hn\" (UniqueName: \"kubernetes.io/projected/c537a85f-4479-417a-bbb0-4a3c603eac2d-kube-api-access-h27hn\") pod \"dnsmasq-dns-675f4bcbfc-m7cpb\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.906309 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.906452 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wxqt\" (UniqueName: \"kubernetes.io/projected/1a561d8e-97e1-4f2f-8143-67411c643b19-kube-api-access-5wxqt\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.906508 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-config\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.906605 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.907429 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.907633 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-config\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:47:59 crc kubenswrapper[4906]: I0227 08:47:59.930636 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wxqt\" (UniqueName: \"kubernetes.io/projected/1a561d8e-97e1-4f2f-8143-67411c643b19-kube-api-access-5wxqt\") pod \"dnsmasq-dns-78dd6ddcc-f92xb\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.001495 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.137669 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536368-tsnrz"] Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.139651 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.142269 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.142512 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.149913 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536368-tsnrz"] Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.150559 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.212178 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gztxx\" (UniqueName: \"kubernetes.io/projected/6e6ebab9-7ce0-4b39-8f00-bb43625e784a-kube-api-access-gztxx\") pod \"auto-csr-approver-29536368-tsnrz\" (UID: \"6e6ebab9-7ce0-4b39-8f00-bb43625e784a\") " pod="openshift-infra/auto-csr-approver-29536368-tsnrz" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.313620 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gztxx\" (UniqueName: \"kubernetes.io/projected/6e6ebab9-7ce0-4b39-8f00-bb43625e784a-kube-api-access-gztxx\") pod \"auto-csr-approver-29536368-tsnrz\" (UID: \"6e6ebab9-7ce0-4b39-8f00-bb43625e784a\") " pod="openshift-infra/auto-csr-approver-29536368-tsnrz" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.326789 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-f92xb"] Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.336649 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gztxx\" (UniqueName: \"kubernetes.io/projected/6e6ebab9-7ce0-4b39-8f00-bb43625e784a-kube-api-access-gztxx\") pod \"auto-csr-approver-29536368-tsnrz\" (UID: \"6e6ebab9-7ce0-4b39-8f00-bb43625e784a\") " pod="openshift-infra/auto-csr-approver-29536368-tsnrz" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.346232 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.395516 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m7cpb"] Feb 27 08:48:00 crc kubenswrapper[4906]: W0227 08:48:00.397024 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc537a85f_4479_417a_bbb0_4a3c603eac2d.slice/crio-8a885ec781837924b7d3276ca19dbc1d641b8c0fd0019810e5a0df7aa7851efe WatchSource:0}: Error finding container 8a885ec781837924b7d3276ca19dbc1d641b8c0fd0019810e5a0df7aa7851efe: Status 404 returned error can't find the container with id 8a885ec781837924b7d3276ca19dbc1d641b8c0fd0019810e5a0df7aa7851efe Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.457607 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" event={"ID":"1a561d8e-97e1-4f2f-8143-67411c643b19","Type":"ContainerStarted","Data":"48ba3bf29b85bcb51f075a11fa15e2952810d7903809cb6a17db965ae37bba1f"} Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.459005 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" 
event={"ID":"c537a85f-4479-417a-bbb0-4a3c603eac2d","Type":"ContainerStarted","Data":"8a885ec781837924b7d3276ca19dbc1d641b8c0fd0019810e5a0df7aa7851efe"} Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.472754 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" Feb 27 08:48:00 crc kubenswrapper[4906]: I0227 08:48:00.924644 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536368-tsnrz"] Feb 27 08:48:01 crc kubenswrapper[4906]: I0227 08:48:01.469519 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" event={"ID":"6e6ebab9-7ce0-4b39-8f00-bb43625e784a","Type":"ContainerStarted","Data":"da18c3c3321d00605e587f7a53ebb80bf5d5183157d8ab1f653244110fa2325b"} Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.544017 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m7cpb"] Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.581496 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-4m2sq"] Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.583199 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.599987 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-4m2sq"] Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.674062 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-config\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.674207 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f6qdn\" (UniqueName: \"kubernetes.io/projected/8b70ee3a-8626-4cee-b807-b9fe7aebac31-kube-api-access-f6qdn\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.674263 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.775653 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f6qdn\" (UniqueName: \"kubernetes.io/projected/8b70ee3a-8626-4cee-b807-b9fe7aebac31-kube-api-access-f6qdn\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.775719 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 
08:48:02.775757 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-config\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.776943 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-config\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.777272 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-dns-svc\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.802860 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f6qdn\" (UniqueName: \"kubernetes.io/projected/8b70ee3a-8626-4cee-b807-b9fe7aebac31-kube-api-access-f6qdn\") pod \"dnsmasq-dns-5ccc8479f9-4m2sq\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.877129 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-f92xb"] Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.923275 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.963941 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nt4fx"] Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.965974 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:02 crc kubenswrapper[4906]: I0227 08:48:02.981452 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nt4fx"] Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.085202 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-config\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.085275 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.085390 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9npr\" (UniqueName: \"kubernetes.io/projected/d1bda1b5-8590-4e63-9676-25f6c58f072d-kube-api-access-t9npr\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.191780 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9npr\" (UniqueName: \"kubernetes.io/projected/d1bda1b5-8590-4e63-9676-25f6c58f072d-kube-api-access-t9npr\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.191911 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-config\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.191941 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.193077 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.193212 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-config\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.213562 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9npr\" (UniqueName: 
\"kubernetes.io/projected/d1bda1b5-8590-4e63-9676-25f6c58f072d-kube-api-access-t9npr\") pod \"dnsmasq-dns-57d769cc4f-nt4fx\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.353753 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.541912 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-4m2sq"] Feb 27 08:48:03 crc kubenswrapper[4906]: W0227 08:48:03.554857 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8b70ee3a_8626_4cee_b807_b9fe7aebac31.slice/crio-827c0950abcd91bd1507b8137f4f24ad95643fa7dd6b94973ba82495c3adc652 WatchSource:0}: Error finding container 827c0950abcd91bd1507b8137f4f24ad95643fa7dd6b94973ba82495c3adc652: Status 404 returned error can't find the container with id 827c0950abcd91bd1507b8137f4f24ad95643fa7dd6b94973ba82495c3adc652 Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.767600 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.774368 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.777151 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.778111 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-p2xch" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.778331 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.778415 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.778697 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.778739 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.779015 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.799186 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.898755 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nt4fx"] Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913233 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913302 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913331 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913357 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913385 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913411 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/752c995e-5d01-4705-ab26-be06da61290d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913445 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/752c995e-5d01-4705-ab26-be06da61290d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913474 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913503 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckxnq\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-kube-api-access-ckxnq\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913535 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:03 crc kubenswrapper[4906]: I0227 08:48:03.913567 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.015810 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.015945 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016004 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016034 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016094 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016119 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/752c995e-5d01-4705-ab26-be06da61290d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016157 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/752c995e-5d01-4705-ab26-be06da61290d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016179 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016203 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckxnq\" (UniqueName: 
\"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-kube-api-access-ckxnq\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016224 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016253 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016387 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.016576 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.017415 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.017571 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.019159 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.019828 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.033824 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/752c995e-5d01-4705-ab26-be06da61290d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " 
pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.034029 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.045217 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/752c995e-5d01-4705-ab26-be06da61290d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.051233 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckxnq\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-kube-api-access-ckxnq\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.058855 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.076123 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.094358 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.096117 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.104576 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-kt5pn" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.104836 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.105037 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.105174 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.105524 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.105692 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.128062 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.170246 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.171553 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.219825 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/288d23ef-ae52-4275-a827-ebf77b2823ea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.219942 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75rbj\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-kube-api-access-75rbj\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.219969 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.220007 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.220040 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-config-data\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 
08:48:04.220077 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/288d23ef-ae52-4275-a827-ebf77b2823ea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.220121 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.220191 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.220224 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.220249 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.220285 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322574 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-config-data\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322642 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/288d23ef-ae52-4275-a827-ebf77b2823ea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322701 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322731 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322760 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322780 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322818 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322920 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/288d23ef-ae52-4275-a827-ebf77b2823ea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.323207 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.323945 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.324259 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.322960 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75rbj\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-kube-api-access-75rbj\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.325262 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc 
kubenswrapper[4906]: I0227 08:48:04.325308 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.326284 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.326679 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-server-conf\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.327260 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-config-data\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.333049 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.333853 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/288d23ef-ae52-4275-a827-ebf77b2823ea-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.337841 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.339043 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/288d23ef-ae52-4275-a827-ebf77b2823ea-pod-info\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.352173 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75rbj\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-kube-api-access-75rbj\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.354693 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " pod="openstack/rabbitmq-server-0" Feb 27 
08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.505189 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.522607 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" event={"ID":"d1bda1b5-8590-4e63-9676-25f6c58f072d","Type":"ContainerStarted","Data":"1ac97f3a57e64646005ead74ec2ac3056f71f313e339cfd50dbb1c0ab20a8ac8"} Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.524181 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" event={"ID":"8b70ee3a-8626-4cee-b807-b9fe7aebac31","Type":"ContainerStarted","Data":"827c0950abcd91bd1507b8137f4f24ad95643fa7dd6b94973ba82495c3adc652"} Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.525740 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" event={"ID":"6e6ebab9-7ce0-4b39-8f00-bb43625e784a","Type":"ContainerStarted","Data":"2b4e6472638d54358dd9429dcca0ac3b90e620e5c251cc657ca9849f7e1bc0a4"} Feb 27 08:48:04 crc kubenswrapper[4906]: I0227 08:48:04.719296 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:48:04 crc kubenswrapper[4906]: W0227 08:48:04.735704 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod752c995e_5d01_4705_ab26_be06da61290d.slice/crio-0b3848af1ef8ad5f5f2bdeb7e8c7d4586e9e2122de9b070dfc1a9f601d2323fd WatchSource:0}: Error finding container 0b3848af1ef8ad5f5f2bdeb7e8c7d4586e9e2122de9b070dfc1a9f601d2323fd: Status 404 returned error can't find the container with id 0b3848af1ef8ad5f5f2bdeb7e8c7d4586e9e2122de9b070dfc1a9f601d2323fd Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.018125 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.028714 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.036503 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.040376 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.040714 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.040963 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.041206 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-h747s" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.047083 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.055815 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.157437 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.157525 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-config-data-default\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.157551 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.157584 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.158203 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-kolla-config\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.158285 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.158310 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk5l8\" (UniqueName: \"kubernetes.io/projected/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-kube-api-access-kk5l8\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.158347 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.260847 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.260953 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk5l8\" (UniqueName: \"kubernetes.io/projected/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-kube-api-access-kk5l8\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.261348 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.261435 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.261474 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-config-data-default\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.261502 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.261527 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.261567 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-kolla-config\") pod 
\"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.261760 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.262513 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.262676 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-kolla-config\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.262779 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-config-data-default\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.264288 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.280969 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.290453 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.291043 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk5l8\" (UniqueName: \"kubernetes.io/projected/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-kube-api-access-kk5l8\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.296197 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b\") " pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.411248 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.554183 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"752c995e-5d01-4705-ab26-be06da61290d","Type":"ContainerStarted","Data":"0b3848af1ef8ad5f5f2bdeb7e8c7d4586e9e2122de9b070dfc1a9f601d2323fd"} Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.557774 4906 generic.go:334] "Generic (PLEG): container finished" podID="6e6ebab9-7ce0-4b39-8f00-bb43625e784a" containerID="2b4e6472638d54358dd9429dcca0ac3b90e620e5c251cc657ca9849f7e1bc0a4" exitCode=0 Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.558343 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" event={"ID":"6e6ebab9-7ce0-4b39-8f00-bb43625e784a","Type":"ContainerDied","Data":"2b4e6472638d54358dd9429dcca0ac3b90e620e5c251cc657ca9849f7e1bc0a4"} Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.566934 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"288d23ef-ae52-4275-a827-ebf77b2823ea","Type":"ContainerStarted","Data":"c98fa7af7488ca0cd14703aae49587e30fca7342836ae3af8ebb8b61ed959025"} Feb 27 08:48:05 crc kubenswrapper[4906]: I0227 08:48:05.939560 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.303927 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.308718 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.311392 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-swdrh" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.311798 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.313036 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.313220 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.316480 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.392788 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.392859 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/960971c6-e3d1-458e-9991-91cbcbeb9d5e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.393008 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/960971c6-e3d1-458e-9991-91cbcbeb9d5e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.393038 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.393081 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/960971c6-e3d1-458e-9991-91cbcbeb9d5e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.393103 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.393133 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4ktg\" (UniqueName: \"kubernetes.io/projected/960971c6-e3d1-458e-9991-91cbcbeb9d5e-kube-api-access-k4ktg\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.393181 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.495142 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/960971c6-e3d1-458e-9991-91cbcbeb9d5e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.501458 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/960971c6-e3d1-458e-9991-91cbcbeb9d5e-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.503160 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.503254 
4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.503285 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/960971c6-e3d1-458e-9991-91cbcbeb9d5e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.503362 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4ktg\" (UniqueName: \"kubernetes.io/projected/960971c6-e3d1-458e-9991-91cbcbeb9d5e-kube-api-access-k4ktg\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.503435 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.503562 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.503602 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/960971c6-e3d1-458e-9991-91cbcbeb9d5e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.504399 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.505188 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.505548 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.521375 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/960971c6-e3d1-458e-9991-91cbcbeb9d5e-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.522260 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/960971c6-e3d1-458e-9991-91cbcbeb9d5e-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.528294 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4ktg\" (UniqueName: \"kubernetes.io/projected/960971c6-e3d1-458e-9991-91cbcbeb9d5e-kube-api-access-k4ktg\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.541431 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/960971c6-e3d1-458e-9991-91cbcbeb9d5e-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.544914 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"openstack-cell1-galera-0\" (UID: \"960971c6-e3d1-458e-9991-91cbcbeb9d5e\") " pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.633499 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.640292 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b","Type":"ContainerStarted","Data":"6414d6303be1c71551d3445b29a9972ecf14d6e75745d40fe23cb7a2ed55c649"} Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.723129 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.724464 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.729455 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-n2th8" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.729729 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.731136 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.750705 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.809281 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e678f55d-5f6a-4ce7-92f3-5a7b87803830-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.809617 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e678f55d-5f6a-4ce7-92f3-5a7b87803830-kolla-config\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.809663 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e678f55d-5f6a-4ce7-92f3-5a7b87803830-config-data\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.809714 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljm8j\" (UniqueName: \"kubernetes.io/projected/e678f55d-5f6a-4ce7-92f3-5a7b87803830-kube-api-access-ljm8j\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.809822 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e678f55d-5f6a-4ce7-92f3-5a7b87803830-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.915063 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e678f55d-5f6a-4ce7-92f3-5a7b87803830-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.915222 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e678f55d-5f6a-4ce7-92f3-5a7b87803830-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.915261 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/e678f55d-5f6a-4ce7-92f3-5a7b87803830-kolla-config\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.915334 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e678f55d-5f6a-4ce7-92f3-5a7b87803830-config-data\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.915443 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljm8j\" (UniqueName: \"kubernetes.io/projected/e678f55d-5f6a-4ce7-92f3-5a7b87803830-kube-api-access-ljm8j\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.916409 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e678f55d-5f6a-4ce7-92f3-5a7b87803830-kolla-config\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.916565 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e678f55d-5f6a-4ce7-92f3-5a7b87803830-config-data\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.922283 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e678f55d-5f6a-4ce7-92f3-5a7b87803830-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.940307 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljm8j\" (UniqueName: \"kubernetes.io/projected/e678f55d-5f6a-4ce7-92f3-5a7b87803830-kube-api-access-ljm8j\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:06 crc kubenswrapper[4906]: I0227 08:48:06.954612 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e678f55d-5f6a-4ce7-92f3-5a7b87803830-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e678f55d-5f6a-4ce7-92f3-5a7b87803830\") " pod="openstack/memcached-0" Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.058666 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.212366 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.346725 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gztxx\" (UniqueName: \"kubernetes.io/projected/6e6ebab9-7ce0-4b39-8f00-bb43625e784a-kube-api-access-gztxx\") pod \"6e6ebab9-7ce0-4b39-8f00-bb43625e784a\" (UID: \"6e6ebab9-7ce0-4b39-8f00-bb43625e784a\") " Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.356467 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e6ebab9-7ce0-4b39-8f00-bb43625e784a-kube-api-access-gztxx" (OuterVolumeSpecName: "kube-api-access-gztxx") pod "6e6ebab9-7ce0-4b39-8f00-bb43625e784a" (UID: "6e6ebab9-7ce0-4b39-8f00-bb43625e784a"). InnerVolumeSpecName "kube-api-access-gztxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.388060 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Feb 27 08:48:07 crc kubenswrapper[4906]: W0227 08:48:07.402503 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod960971c6_e3d1_458e_9991_91cbcbeb9d5e.slice/crio-b12ee37ba259e0bba85973193ce649ba5a0908832d33b53a670b173fd53063e1 WatchSource:0}: Error finding container b12ee37ba259e0bba85973193ce649ba5a0908832d33b53a670b173fd53063e1: Status 404 returned error can't find the container with id b12ee37ba259e0bba85973193ce649ba5a0908832d33b53a670b173fd53063e1 Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.457684 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gztxx\" (UniqueName: \"kubernetes.io/projected/6e6ebab9-7ce0-4b39-8f00-bb43625e784a-kube-api-access-gztxx\") on node \"crc\" DevicePath \"\"" Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.664515 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.664522 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536368-tsnrz" event={"ID":"6e6ebab9-7ce0-4b39-8f00-bb43625e784a","Type":"ContainerDied","Data":"da18c3c3321d00605e587f7a53ebb80bf5d5183157d8ab1f653244110fa2325b"} Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.664675 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da18c3c3321d00605e587f7a53ebb80bf5d5183157d8ab1f653244110fa2325b" Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.680913 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"960971c6-e3d1-458e-9991-91cbcbeb9d5e","Type":"ContainerStarted","Data":"b12ee37ba259e0bba85973193ce649ba5a0908832d33b53a670b173fd53063e1"} Feb 27 08:48:07 crc kubenswrapper[4906]: I0227 08:48:07.778207 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Feb 27 08:48:07 crc kubenswrapper[4906]: W0227 08:48:07.807645 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode678f55d_5f6a_4ce7_92f3_5a7b87803830.slice/crio-5b324d2cccdb032732b8e9ab60e5238a5ea96ba9105b080488f057c8e35c0a8c WatchSource:0}: Error finding container 5b324d2cccdb032732b8e9ab60e5238a5ea96ba9105b080488f057c8e35c0a8c: Status 404 returned error can't find the container with id 5b324d2cccdb032732b8e9ab60e5238a5ea96ba9105b080488f057c8e35c0a8c Feb 27 08:48:08 crc kubenswrapper[4906]: I0227 08:48:08.329950 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536362-q8kgf"] Feb 27 08:48:08 crc kubenswrapper[4906]: I0227 08:48:08.360489 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536362-q8kgf"] Feb 27 08:48:08 crc kubenswrapper[4906]: I0227 08:48:08.590341 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="decab1a7-e73d-43f1-bfd1-ac749354500a" path="/var/lib/kubelet/pods/decab1a7-e73d-43f1-bfd1-ac749354500a/volumes" Feb 27 08:48:08 crc kubenswrapper[4906]: I0227 08:48:08.699255 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"e678f55d-5f6a-4ce7-92f3-5a7b87803830","Type":"ContainerStarted","Data":"5b324d2cccdb032732b8e9ab60e5238a5ea96ba9105b080488f057c8e35c0a8c"} Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.020913 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:48:09 crc kubenswrapper[4906]: E0227 08:48:09.021623 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e6ebab9-7ce0-4b39-8f00-bb43625e784a" containerName="oc" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.021693 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e6ebab9-7ce0-4b39-8f00-bb43625e784a" containerName="oc" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.021983 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e6ebab9-7ce0-4b39-8f00-bb43625e784a" containerName="oc" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.022779 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.032475 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-r5kmt" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.039751 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.100524 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtpf6\" (UniqueName: \"kubernetes.io/projected/eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5-kube-api-access-rtpf6\") pod \"kube-state-metrics-0\" (UID: \"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5\") " pod="openstack/kube-state-metrics-0" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.202299 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtpf6\" (UniqueName: \"kubernetes.io/projected/eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5-kube-api-access-rtpf6\") pod \"kube-state-metrics-0\" (UID: \"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5\") " pod="openstack/kube-state-metrics-0" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.231507 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtpf6\" (UniqueName: \"kubernetes.io/projected/eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5-kube-api-access-rtpf6\") pod \"kube-state-metrics-0\" (UID: \"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5\") " pod="openstack/kube-state-metrics-0" Feb 27 08:48:09 crc kubenswrapper[4906]: I0227 08:48:09.367736 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 27 08:48:10 crc kubenswrapper[4906]: I0227 08:48:10.012611 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.220008 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-67lpp"] Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.221868 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.226364 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-6htgj" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.226814 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.227376 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.238984 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-h6fvw"] Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.241506 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.259305 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67lpp"] Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.275193 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-h6fvw"] Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.278829 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-run-ovn\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.278897 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd97ec8d-9d8f-4817-9770-d5392d4f60df-combined-ca-bundle\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.278953 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-run\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.278987 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd97ec8d-9d8f-4817-9770-d5392d4f60df-ovn-controller-tls-certs\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279012 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-run\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279036 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvh7k\" (UniqueName: \"kubernetes.io/projected/8bff3f91-e831-4a15-a078-639483433b26-kube-api-access-hvh7k\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279073 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-log-ovn\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279097 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2qs4\" (UniqueName: \"kubernetes.io/projected/bd97ec8d-9d8f-4817-9770-d5392d4f60df-kube-api-access-x2qs4\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 
08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279123 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-etc-ovs\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279160 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bff3f91-e831-4a15-a078-639483433b26-scripts\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279191 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-log\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279215 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd97ec8d-9d8f-4817-9770-d5392d4f60df-scripts\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.279250 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-lib\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.380786 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-run\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.380891 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd97ec8d-9d8f-4817-9770-d5392d4f60df-ovn-controller-tls-certs\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.380931 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-run\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.380969 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvh7k\" (UniqueName: \"kubernetes.io/projected/8bff3f91-e831-4a15-a078-639483433b26-kube-api-access-hvh7k\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381020 4906 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-log-ovn\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381050 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2qs4\" (UniqueName: \"kubernetes.io/projected/bd97ec8d-9d8f-4817-9770-d5392d4f60df-kube-api-access-x2qs4\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381076 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-etc-ovs\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381105 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bff3f91-e831-4a15-a078-639483433b26-scripts\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381126 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-log\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381142 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd97ec8d-9d8f-4817-9770-d5392d4f60df-scripts\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381169 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-lib\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381190 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-run-ovn\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381217 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd97ec8d-9d8f-4817-9770-d5392d4f60df-combined-ca-bundle\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381470 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-run\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" 
Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.381889 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-etc-ovs\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.383172 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-lib\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.383487 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-run\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.383413 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-log-ovn\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.383650 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/8bff3f91-e831-4a15-a078-639483433b26-var-log\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.383787 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/bd97ec8d-9d8f-4817-9770-d5392d4f60df-var-run-ovn\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.385274 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/bd97ec8d-9d8f-4817-9770-d5392d4f60df-scripts\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.385497 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8bff3f91-e831-4a15-a078-639483433b26-scripts\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.393301 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bd97ec8d-9d8f-4817-9770-d5392d4f60df-combined-ca-bundle\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.394795 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/bd97ec8d-9d8f-4817-9770-d5392d4f60df-ovn-controller-tls-certs\") pod \"ovn-controller-67lpp\" (UID: 
\"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.417372 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2qs4\" (UniqueName: \"kubernetes.io/projected/bd97ec8d-9d8f-4817-9770-d5392d4f60df-kube-api-access-x2qs4\") pod \"ovn-controller-67lpp\" (UID: \"bd97ec8d-9d8f-4817-9770-d5392d4f60df\") " pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.418569 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvh7k\" (UniqueName: \"kubernetes.io/projected/8bff3f91-e831-4a15-a078-639483433b26-kube-api-access-hvh7k\") pod \"ovn-controller-ovs-h6fvw\" (UID: \"8bff3f91-e831-4a15-a078-639483433b26\") " pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.426559 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.433133 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.454636 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-hd7jk" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.455003 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.455010 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.455213 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.455382 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.474029 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488081 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488170 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19046133-de5e-4303-b576-cd539ee5d3ae-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488225 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488258 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg6qr\" (UniqueName: 
\"kubernetes.io/projected/19046133-de5e-4303-b576-cd539ee5d3ae-kube-api-access-jg6qr\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488318 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19046133-de5e-4303-b576-cd539ee5d3ae-config\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488366 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/19046133-de5e-4303-b576-cd539ee5d3ae-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488396 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.488434 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.566517 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.585904 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.590167 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.590230 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/19046133-de5e-4303-b576-cd539ee5d3ae-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.590403 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.591753 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg6qr\" (UniqueName: \"kubernetes.io/projected/19046133-de5e-4303-b576-cd539ee5d3ae-kube-api-access-jg6qr\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.592088 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19046133-de5e-4303-b576-cd539ee5d3ae-config\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.592338 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/19046133-de5e-4303-b576-cd539ee5d3ae-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.592378 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.592415 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.592815 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.594138 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/19046133-de5e-4303-b576-cd539ee5d3ae-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.595015 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/19046133-de5e-4303-b576-cd539ee5d3ae-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.596592 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19046133-de5e-4303-b576-cd539ee5d3ae-config\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.606899 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.610116 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.613713 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jg6qr\" (UniqueName: \"kubernetes.io/projected/19046133-de5e-4303-b576-cd539ee5d3ae-kube-api-access-jg6qr\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.623817 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/19046133-de5e-4303-b576-cd539ee5d3ae-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.677030 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"ovsdbserver-nb-0\" (UID: \"19046133-de5e-4303-b576-cd539ee5d3ae\") " pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:12 crc kubenswrapper[4906]: I0227 08:48:12.811307 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Feb 27 08:48:14 crc kubenswrapper[4906]: I0227 08:48:14.453362 4906 scope.go:117] "RemoveContainer" containerID="128b5cb607dca913cf1abfcf132d391f417fce5401683ad6b3e9ad3dd355ba6f" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.602026 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.603949 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.612709 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-zcr5h" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.613057 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.612709 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.613550 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.621578 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.785360 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q69lt\" (UniqueName: \"kubernetes.io/projected/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-kube-api-access-q69lt\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.786345 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.786607 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.786801 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.786970 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5","Type":"ContainerStarted","Data":"443ffa2210eed630941173740dbf758c1635870cb8897a6823b716415a1ca308"} Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.787167 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.787279 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" 
Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.787457 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.787590 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.889517 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.889577 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.889604 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.889643 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.889704 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.889895 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q69lt\" (UniqueName: \"kubernetes.io/projected/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-kube-api-access-q69lt\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.890177 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.890695 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-ovsdb-rundir\") pod 
\"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.890951 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-config\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.890990 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.891083 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.891252 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.898542 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.898583 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.898682 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.908603 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q69lt\" (UniqueName: \"kubernetes.io/projected/cfe60c7c-9d0a-488c-bb9a-9a1a8511437f-kube-api-access-q69lt\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.913052 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f\") " pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:15 crc kubenswrapper[4906]: I0227 08:48:15.937667 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Feb 27 08:48:25 crc kubenswrapper[4906]: E0227 08:48:25.707140 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Feb 27 08:48:25 crc kubenswrapper[4906]: E0227 08:48:25.709248 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kk5l8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:25 crc kubenswrapper[4906]: E0227 08:48:25.710549 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b" Feb 27 08:48:34 crc kubenswrapper[4906]: E0227 08:48:32.825914 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b" Feb 27 08:48:40 crc kubenswrapper[4906]: E0227 08:48:40.156183 4906 log.go:32] "PullImage from image service failed" err="rpc error: 
code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Feb 27 08:48:40 crc kubenswrapper[4906]: E0227 08:48:40.158826 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-75rbj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(288d23ef-ae52-4275-a827-ebf77b2823ea): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:40 crc kubenswrapper[4906]: E0227 08:48:40.160337 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" Feb 27 08:48:40 crc kubenswrapper[4906]: E0227 08:48:40.198505 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" Feb 27 08:48:40 crc kubenswrapper[4906]: E0227 08:48:40.213462 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Feb 27 08:48:40 crc kubenswrapper[4906]: E0227 08:48:40.214045 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-k4ktg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(960971c6-e3d1-458e-9991-91cbcbeb9d5e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:40 crc kubenswrapper[4906]: E0227 08:48:40.215503 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="960971c6-e3d1-458e-9991-91cbcbeb9d5e" Feb 27 08:48:41 crc kubenswrapper[4906]: E0227 08:48:41.207194 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" 
pod="openstack/openstack-cell1-galera-0" podUID="960971c6-e3d1-458e-9991-91cbcbeb9d5e" Feb 27 08:48:45 crc kubenswrapper[4906]: E0227 08:48:45.560500 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Feb 27 08:48:45 crc kubenswrapper[4906]: E0227 08:48:45.561086 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:ndch5ffh695h644h98h58dh5c8h677h84h695h5c4h649h694h55fh5c4h85h5b7h87h544hf6h5fbhc4hb8h695h68h644hf8h688hfchd9h5c5h58fq,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ljm8j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(e678f55d-5f6a-4ce7-92f3-5a7b87803830): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:45 crc kubenswrapper[4906]: E0227 08:48:45.562316 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="e678f55d-5f6a-4ce7-92f3-5a7b87803830" Feb 27 08:48:45 crc kubenswrapper[4906]: E0227 08:48:45.576605 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Feb 27 08:48:45 crc kubenswrapper[4906]: E0227 08:48:45.576640 4906 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Feb 27 08:48:45 crc kubenswrapper[4906]: E0227 08:48:45.576761 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rtpf6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled" logger="UnhandledError" Feb 27 08:48:45 crc kubenswrapper[4906]: E0227 08:48:45.577942 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying layer: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.276195 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.277826 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="e678f55d-5f6a-4ce7-92f3-5a7b87803830" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.540632 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.540871 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5wxqt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-f92xb_openstack(1a561d8e-97e1-4f2f-8143-67411c643b19): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.542286 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" podUID="1a561d8e-97e1-4f2f-8143-67411c643b19" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.558393 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.558639 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh5dfhb6h64h676hc4h78h97h669h54chfbh696hb5h54bh5d4h6bh64h644h677h584h5cbh698h9dh5bbh5f8h5b8hcdh644h5c7h694hbfh589q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f6qdn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-5ccc8479f9-4m2sq_openstack(8b70ee3a-8626-4cee-b807-b9fe7aebac31): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.560173 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.560287 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.560389 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-t9npr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-nt4fx_openstack(d1bda1b5-8590-4e63-9676-25f6c58f072d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.564762 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.573327 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.576705 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h27hn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-m7cpb_openstack(c537a85f-4479-417a-bbb0-4a3c603eac2d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:48:46 crc kubenswrapper[4906]: E0227 08:48:46.578040 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" podUID="c537a85f-4479-417a-bbb0-4a3c603eac2d" Feb 27 08:48:46 crc kubenswrapper[4906]: I0227 08:48:46.994225 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67lpp"] Feb 27 08:48:47 crc kubenswrapper[4906]: I0227 08:48:47.229844 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Feb 27 08:48:47 crc kubenswrapper[4906]: I0227 08:48:47.284587 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp" event={"ID":"bd97ec8d-9d8f-4817-9770-d5392d4f60df","Type":"ContainerStarted","Data":"f680ff783b75e2b364c76d88641333e806f48ebfbc08e32dd2518530ab2d3888"} Feb 27 08:48:47 crc kubenswrapper[4906]: I0227 08:48:47.286933 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"19046133-de5e-4303-b576-cd539ee5d3ae","Type":"ContainerStarted","Data":"278b257bf90910c750748b8729ccab028e8e179950822c281cc07419c8ec35ff"} Feb 27 08:48:47 crc kubenswrapper[4906]: E0227 08:48:47.291133 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" Feb 27 08:48:47 crc kubenswrapper[4906]: E0227 08:48:47.291413 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" Feb 27 08:48:47 crc kubenswrapper[4906]: I0227 08:48:47.309674 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-h6fvw"] Feb 27 08:48:47 crc kubenswrapper[4906]: W0227 08:48:47.323498 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8bff3f91_e831_4a15_a078_639483433b26.slice/crio-28632412811dddd155554d9919f9fce49cf57a212a4e0e55544a106d52510efe WatchSource:0}: Error finding container 28632412811dddd155554d9919f9fce49cf57a212a4e0e55544a106d52510efe: Status 404 returned error can't find the container with id 28632412811dddd155554d9919f9fce49cf57a212a4e0e55544a106d52510efe Feb 27 08:48:47 crc kubenswrapper[4906]: I0227 08:48:47.929746 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:48:47 crc kubenswrapper[4906]: I0227 08:48:47.940028 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:48:47 crc kubenswrapper[4906]: I0227 08:48:47.986582 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Feb 27 08:48:48 crc kubenswrapper[4906]: W0227 08:48:48.012451 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcfe60c7c_9d0a_488c_bb9a_9a1a8511437f.slice/crio-39b38cc112a67d096d0a711a2f13a837c1486baf87ac7ee8183ad8e17b725b04 WatchSource:0}: Error finding container 39b38cc112a67d096d0a711a2f13a837c1486baf87ac7ee8183ad8e17b725b04: Status 404 returned error can't find the container with id 39b38cc112a67d096d0a711a2f13a837c1486baf87ac7ee8183ad8e17b725b04 Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.056200 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wxqt\" (UniqueName: \"kubernetes.io/projected/1a561d8e-97e1-4f2f-8143-67411c643b19-kube-api-access-5wxqt\") pod \"1a561d8e-97e1-4f2f-8143-67411c643b19\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.056376 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c537a85f-4479-417a-bbb0-4a3c603eac2d-config\") pod \"c537a85f-4479-417a-bbb0-4a3c603eac2d\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.056424 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-dns-svc\") pod \"1a561d8e-97e1-4f2f-8143-67411c643b19\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.056499 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h27hn\" (UniqueName: \"kubernetes.io/projected/c537a85f-4479-417a-bbb0-4a3c603eac2d-kube-api-access-h27hn\") pod \"c537a85f-4479-417a-bbb0-4a3c603eac2d\" (UID: \"c537a85f-4479-417a-bbb0-4a3c603eac2d\") " Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.057193 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/configmap/c537a85f-4479-417a-bbb0-4a3c603eac2d-config" (OuterVolumeSpecName: "config") pod "c537a85f-4479-417a-bbb0-4a3c603eac2d" (UID: "c537a85f-4479-417a-bbb0-4a3c603eac2d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.057528 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-config\") pod \"1a561d8e-97e1-4f2f-8143-67411c643b19\" (UID: \"1a561d8e-97e1-4f2f-8143-67411c643b19\") " Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.058163 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-config" (OuterVolumeSpecName: "config") pod "1a561d8e-97e1-4f2f-8143-67411c643b19" (UID: "1a561d8e-97e1-4f2f-8143-67411c643b19"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.058340 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1a561d8e-97e1-4f2f-8143-67411c643b19" (UID: "1a561d8e-97e1-4f2f-8143-67411c643b19"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.058669 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c537a85f-4479-417a-bbb0-4a3c603eac2d-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.058693 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.058704 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a561d8e-97e1-4f2f-8143-67411c643b19-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.064495 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a561d8e-97e1-4f2f-8143-67411c643b19-kube-api-access-5wxqt" (OuterVolumeSpecName: "kube-api-access-5wxqt") pod "1a561d8e-97e1-4f2f-8143-67411c643b19" (UID: "1a561d8e-97e1-4f2f-8143-67411c643b19"). InnerVolumeSpecName "kube-api-access-5wxqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.075833 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c537a85f-4479-417a-bbb0-4a3c603eac2d-kube-api-access-h27hn" (OuterVolumeSpecName: "kube-api-access-h27hn") pod "c537a85f-4479-417a-bbb0-4a3c603eac2d" (UID: "c537a85f-4479-417a-bbb0-4a3c603eac2d"). InnerVolumeSpecName "kube-api-access-h27hn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.160837 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h27hn\" (UniqueName: \"kubernetes.io/projected/c537a85f-4479-417a-bbb0-4a3c603eac2d-kube-api-access-h27hn\") on node \"crc\" DevicePath \"\"" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.160869 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wxqt\" (UniqueName: \"kubernetes.io/projected/1a561d8e-97e1-4f2f-8143-67411c643b19-kube-api-access-5wxqt\") on node \"crc\" DevicePath \"\"" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.357314 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f","Type":"ContainerStarted","Data":"39b38cc112a67d096d0a711a2f13a837c1486baf87ac7ee8183ad8e17b725b04"} Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.359446 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" event={"ID":"c537a85f-4479-417a-bbb0-4a3c603eac2d","Type":"ContainerDied","Data":"8a885ec781837924b7d3276ca19dbc1d641b8c0fd0019810e5a0df7aa7851efe"} Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.359556 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-m7cpb" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.364042 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h6fvw" event={"ID":"8bff3f91-e831-4a15-a078-639483433b26","Type":"ContainerStarted","Data":"28632412811dddd155554d9919f9fce49cf57a212a4e0e55544a106d52510efe"} Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.366794 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" event={"ID":"1a561d8e-97e1-4f2f-8143-67411c643b19","Type":"ContainerDied","Data":"48ba3bf29b85bcb51f075a11fa15e2952810d7903809cb6a17db965ae37bba1f"} Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.366977 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-f92xb" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.375814 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"752c995e-5d01-4705-ab26-be06da61290d","Type":"ContainerStarted","Data":"6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690"} Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.464365 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-f92xb"] Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.472440 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-f92xb"] Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.495217 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m7cpb"] Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.502625 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-m7cpb"] Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.565157 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a561d8e-97e1-4f2f-8143-67411c643b19" path="/var/lib/kubelet/pods/1a561d8e-97e1-4f2f-8143-67411c643b19/volumes" Feb 27 08:48:48 crc kubenswrapper[4906]: I0227 08:48:48.565625 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c537a85f-4479-417a-bbb0-4a3c603eac2d" path="/var/lib/kubelet/pods/c537a85f-4479-417a-bbb0-4a3c603eac2d/volumes" Feb 27 08:48:49 crc kubenswrapper[4906]: I0227 08:48:49.391395 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b","Type":"ContainerStarted","Data":"1c163bf93244847308915b091d5c1942d8661d905f53fe44bb25100e6ea12929"} Feb 27 08:48:52 crc kubenswrapper[4906]: I0227 08:48:52.415767 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f","Type":"ContainerStarted","Data":"98fe9192c94be9fd40bf2f0d24caea30414bd84030ea77c2fbb472a8282ac9b2"} Feb 27 08:48:52 crc kubenswrapper[4906]: I0227 08:48:52.418926 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"19046133-de5e-4303-b576-cd539ee5d3ae","Type":"ContainerStarted","Data":"0f5bea04048576a634bdebb796b3636bf1201ac2c181ed039207744155b0fce6"} Feb 27 08:48:52 crc kubenswrapper[4906]: I0227 08:48:52.421820 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h6fvw" event={"ID":"8bff3f91-e831-4a15-a078-639483433b26","Type":"ContainerStarted","Data":"086476de1fb9d509e4943316a9a2d93e063a52f4c78ddff50816fbfad52832b2"} Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.439092 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp" event={"ID":"bd97ec8d-9d8f-4817-9770-d5392d4f60df","Type":"ContainerStarted","Data":"ca00d76d5091d7f8fb914d5d2f835022976b7fb3a90d8760a45d267b16986326"} Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.439691 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-67lpp" Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.447270 4906 generic.go:334] "Generic (PLEG): container finished" podID="8bff3f91-e831-4a15-a078-639483433b26" containerID="086476de1fb9d509e4943316a9a2d93e063a52f4c78ddff50816fbfad52832b2" exitCode=0 Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.447360 4906 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h6fvw" event={"ID":"8bff3f91-e831-4a15-a078-639483433b26","Type":"ContainerDied","Data":"086476de1fb9d509e4943316a9a2d93e063a52f4c78ddff50816fbfad52832b2"} Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.451048 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"960971c6-e3d1-458e-9991-91cbcbeb9d5e","Type":"ContainerStarted","Data":"3973094fa188ae5e03bf49fdbc0cfa51659f61090fd14533acbac52811743c32"} Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.456569 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"288d23ef-ae52-4275-a827-ebf77b2823ea","Type":"ContainerStarted","Data":"648e3776fe4d7a47348f9297e5b57825d7f70d4d2e766a296dfac824e916eb9b"} Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.465226 4906 generic.go:334] "Generic (PLEG): container finished" podID="47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b" containerID="1c163bf93244847308915b091d5c1942d8661d905f53fe44bb25100e6ea12929" exitCode=0 Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.465284 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b","Type":"ContainerDied","Data":"1c163bf93244847308915b091d5c1942d8661d905f53fe44bb25100e6ea12929"} Feb 27 08:48:53 crc kubenswrapper[4906]: I0227 08:48:53.498332 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-67lpp" podStartSLOduration=36.479468047 podStartE2EDuration="41.498307591s" podCreationTimestamp="2026-02-27 08:48:12 +0000 UTC" firstStartedPulling="2026-02-27 08:48:47.021582493 +0000 UTC m=+1225.415984103" lastFinishedPulling="2026-02-27 08:48:52.040422037 +0000 UTC m=+1230.434823647" observedRunningTime="2026-02-27 08:48:53.46209868 +0000 UTC m=+1231.856500310" watchObservedRunningTime="2026-02-27 08:48:53.498307591 +0000 UTC m=+1231.892709201" Feb 27 08:48:54 crc kubenswrapper[4906]: I0227 08:48:54.477782 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h6fvw" event={"ID":"8bff3f91-e831-4a15-a078-639483433b26","Type":"ContainerStarted","Data":"8efc1559154fb692aff24a35538f0a923d1676a0d27b9fbb8212caa886f20650"} Feb 27 08:48:54 crc kubenswrapper[4906]: I0227 08:48:54.478484 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:54 crc kubenswrapper[4906]: I0227 08:48:54.478499 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-h6fvw" event={"ID":"8bff3f91-e831-4a15-a078-639483433b26","Type":"ContainerStarted","Data":"1181f263dd36917dc340fcb0e0ef75f322dab741f0576afe3c0abcac1b2a2c1b"} Feb 27 08:48:54 crc kubenswrapper[4906]: I0227 08:48:54.480393 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b","Type":"ContainerStarted","Data":"221d4919013167f84a24cd1f43e149ceead9806028c172e6200e3566c953e558"} Feb 27 08:48:54 crc kubenswrapper[4906]: I0227 08:48:54.510297 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-h6fvw" podStartSLOduration=37.80039507 podStartE2EDuration="42.510277002s" podCreationTimestamp="2026-02-27 08:48:12 +0000 UTC" firstStartedPulling="2026-02-27 08:48:47.33075262 +0000 UTC m=+1225.725154230" lastFinishedPulling="2026-02-27 
08:48:52.040634552 +0000 UTC m=+1230.435036162" observedRunningTime="2026-02-27 08:48:54.504941541 +0000 UTC m=+1232.899343161" watchObservedRunningTime="2026-02-27 08:48:54.510277002 +0000 UTC m=+1232.904678612" Feb 27 08:48:54 crc kubenswrapper[4906]: I0227 08:48:54.546136 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.459307935 podStartE2EDuration="51.546108972s" podCreationTimestamp="2026-02-27 08:48:03 +0000 UTC" firstStartedPulling="2026-02-27 08:48:05.982142634 +0000 UTC m=+1184.376544244" lastFinishedPulling="2026-02-27 08:48:48.068943671 +0000 UTC m=+1226.463345281" observedRunningTime="2026-02-27 08:48:54.543653778 +0000 UTC m=+1232.938055388" watchObservedRunningTime="2026-02-27 08:48:54.546108972 +0000 UTC m=+1232.940510582" Feb 27 08:48:55 crc kubenswrapper[4906]: I0227 08:48:55.412500 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Feb 27 08:48:55 crc kubenswrapper[4906]: I0227 08:48:55.412937 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Feb 27 08:48:55 crc kubenswrapper[4906]: I0227 08:48:55.496113 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:48:57 crc kubenswrapper[4906]: I0227 08:48:57.523060 4906 generic.go:334] "Generic (PLEG): container finished" podID="960971c6-e3d1-458e-9991-91cbcbeb9d5e" containerID="3973094fa188ae5e03bf49fdbc0cfa51659f61090fd14533acbac52811743c32" exitCode=0 Feb 27 08:48:57 crc kubenswrapper[4906]: I0227 08:48:57.523167 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"960971c6-e3d1-458e-9991-91cbcbeb9d5e","Type":"ContainerDied","Data":"3973094fa188ae5e03bf49fdbc0cfa51659f61090fd14533acbac52811743c32"} Feb 27 08:48:58 crc kubenswrapper[4906]: I0227 08:48:58.538907 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"960971c6-e3d1-458e-9991-91cbcbeb9d5e","Type":"ContainerStarted","Data":"b69f814b45e3821fe8ea9322efef92ca9b8a342e7783c396535048dfe7649c3c"} Feb 27 08:48:58 crc kubenswrapper[4906]: I0227 08:48:58.543904 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"cfe60c7c-9d0a-488c-bb9a-9a1a8511437f","Type":"ContainerStarted","Data":"99f13a1c2e20edcb78b9bcf37dac688bf51e21022d66bbfd290ef02497b55497"} Feb 27 08:48:58 crc kubenswrapper[4906]: I0227 08:48:58.547610 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"19046133-de5e-4303-b576-cd539ee5d3ae","Type":"ContainerStarted","Data":"bc856b4bf03e2b1c148aa61a06e9d7d7eea145c2b7e301c40ce0b70c6fff7a3e"} Feb 27 08:48:58 crc kubenswrapper[4906]: I0227 08:48:58.567741 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371983.287073 podStartE2EDuration="53.567702526s" podCreationTimestamp="2026-02-27 08:48:05 +0000 UTC" firstStartedPulling="2026-02-27 08:48:07.44466611 +0000 UTC m=+1185.839067720" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:48:58.565743365 +0000 UTC m=+1236.960144995" watchObservedRunningTime="2026-02-27 08:48:58.567702526 +0000 UTC m=+1236.962104136" Feb 27 08:48:58 crc kubenswrapper[4906]: I0227 08:48:58.645542 4906 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=37.392265939 podStartE2EDuration="47.645516609s" podCreationTimestamp="2026-02-27 08:48:11 +0000 UTC" firstStartedPulling="2026-02-27 08:48:47.24731542 +0000 UTC m=+1225.641717030" lastFinishedPulling="2026-02-27 08:48:57.50056609 +0000 UTC m=+1235.894967700" observedRunningTime="2026-02-27 08:48:58.64440494 +0000 UTC m=+1237.038806570" watchObservedRunningTime="2026-02-27 08:48:58.645516609 +0000 UTC m=+1237.039918219" Feb 27 08:48:58 crc kubenswrapper[4906]: I0227 08:48:58.677582 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=35.171754743 podStartE2EDuration="44.67755099s" podCreationTimestamp="2026-02-27 08:48:14 +0000 UTC" firstStartedPulling="2026-02-27 08:48:48.01484315 +0000 UTC m=+1226.409244760" lastFinishedPulling="2026-02-27 08:48:57.520639397 +0000 UTC m=+1235.915041007" observedRunningTime="2026-02-27 08:48:58.668820281 +0000 UTC m=+1237.063221891" watchObservedRunningTime="2026-02-27 08:48:58.67755099 +0000 UTC m=+1237.071952600" Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.507639 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.556415 4906 generic.go:334] "Generic (PLEG): container finished" podID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerID="30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5" exitCode=0 Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.556501 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" event={"ID":"d1bda1b5-8590-4e63-9676-25f6c58f072d","Type":"ContainerDied","Data":"30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5"} Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.559746 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"e678f55d-5f6a-4ce7-92f3-5a7b87803830","Type":"ContainerStarted","Data":"d41938d41d598f8e5f5979ec9d8e79e57a721300f4e3cf133022de2f586ccf97"} Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.560060 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.561771 4906 generic.go:334] "Generic (PLEG): container finished" podID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerID="457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368" exitCode=0 Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.561956 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" event={"ID":"8b70ee3a-8626-4cee-b807-b9fe7aebac31","Type":"ContainerDied","Data":"457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368"} Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.598209 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Feb 27 08:48:59 crc kubenswrapper[4906]: I0227 08:48:59.657584 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.998397121 podStartE2EDuration="53.657561849s" podCreationTimestamp="2026-02-27 08:48:06 +0000 UTC" firstStartedPulling="2026-02-27 08:48:07.815053124 +0000 UTC m=+1186.209454734" lastFinishedPulling="2026-02-27 08:48:58.474217852 +0000 UTC m=+1236.868619462" observedRunningTime="2026-02-27 08:48:59.616331177 +0000 UTC 
m=+1238.010732797" watchObservedRunningTime="2026-02-27 08:48:59.657561849 +0000 UTC m=+1238.051963489" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.572680 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" event={"ID":"8b70ee3a-8626-4cee-b807-b9fe7aebac31","Type":"ContainerStarted","Data":"5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77"} Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.579832 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" event={"ID":"d1bda1b5-8590-4e63-9676-25f6c58f072d","Type":"ContainerStarted","Data":"2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4"} Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.580680 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.580812 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.609554 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" podStartSLOduration=3.437257986 podStartE2EDuration="58.609532741s" podCreationTimestamp="2026-02-27 08:48:02 +0000 UTC" firstStartedPulling="2026-02-27 08:48:03.920470616 +0000 UTC m=+1182.314872226" lastFinishedPulling="2026-02-27 08:48:59.092745371 +0000 UTC m=+1237.487146981" observedRunningTime="2026-02-27 08:49:00.604956841 +0000 UTC m=+1238.999358461" watchObservedRunningTime="2026-02-27 08:49:00.609532741 +0000 UTC m=+1239.003934351" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.631507 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" podStartSLOduration=3.109273956 podStartE2EDuration="58.631475107s" podCreationTimestamp="2026-02-27 08:48:02 +0000 UTC" firstStartedPulling="2026-02-27 08:48:03.570898849 +0000 UTC m=+1181.965300459" lastFinishedPulling="2026-02-27 08:48:59.0931 +0000 UTC m=+1237.487501610" observedRunningTime="2026-02-27 08:49:00.629764733 +0000 UTC m=+1239.024166353" watchObservedRunningTime="2026-02-27 08:49:00.631475107 +0000 UTC m=+1239.025876717" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.812321 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.871380 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.938962 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.939020 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Feb 27 08:49:00 crc kubenswrapper[4906]: I0227 08:49:00.983355 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.587805 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.653947 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Feb 27 08:49:01 crc 
kubenswrapper[4906]: I0227 08:49:01.660749 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.893131 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nt4fx"] Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.955315 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-r4bd9"] Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.956979 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.969743 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.973221 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-7zlfd"] Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.974826 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.979557 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Feb 27 08:49:01 crc kubenswrapper[4906]: I0227 08:49:01.984181 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-7zlfd"] Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.001832 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-r4bd9"] Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.060827 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w72c6\" (UniqueName: \"kubernetes.io/projected/03e70640-9aec-465b-9141-7944e2a7aeb1-kube-api-access-w72c6\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.061262 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/03e70640-9aec-465b-9141-7944e2a7aeb1-ovs-rundir\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.061420 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-config\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.061559 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.061646 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: 
\"kubernetes.io/host-path/03e70640-9aec-465b-9141-7944e2a7aeb1-ovn-rundir\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.061723 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.061798 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e70640-9aec-465b-9141-7944e2a7aeb1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.062110 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e70640-9aec-465b-9141-7944e2a7aeb1-combined-ca-bundle\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.062226 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e70640-9aec-465b-9141-7944e2a7aeb1-config\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.062254 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjxvj\" (UniqueName: \"kubernetes.io/projected/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-kube-api-access-cjxvj\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.121980 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-4m2sq"] Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.130227 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.131738 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.135618 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.136969 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.137583 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-p9ndz" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.142781 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.147254 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.169653 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-tbrl8"] Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.172998 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e70640-9aec-465b-9141-7944e2a7aeb1-combined-ca-bundle\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173090 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e70640-9aec-465b-9141-7944e2a7aeb1-config\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173118 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjxvj\" (UniqueName: \"kubernetes.io/projected/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-kube-api-access-cjxvj\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173175 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w72c6\" (UniqueName: \"kubernetes.io/projected/03e70640-9aec-465b-9141-7944e2a7aeb1-kube-api-access-w72c6\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173212 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/03e70640-9aec-465b-9141-7944e2a7aeb1-ovs-rundir\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173253 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-config\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173324 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173347 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/03e70640-9aec-465b-9141-7944e2a7aeb1-ovn-rundir\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.173371 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e70640-9aec-465b-9141-7944e2a7aeb1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.174935 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.175231 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-config\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.175576 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/03e70640-9aec-465b-9141-7944e2a7aeb1-ovs-rundir\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.175848 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.176121 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/03e70640-9aec-465b-9141-7944e2a7aeb1-ovn-rundir\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.179197 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.179300 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03e70640-9aec-465b-9141-7944e2a7aeb1-config\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.180282 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " 
pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.186546 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/03e70640-9aec-465b-9141-7944e2a7aeb1-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.192613 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.207323 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjxvj\" (UniqueName: \"kubernetes.io/projected/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-kube-api-access-cjxvj\") pod \"dnsmasq-dns-7fd796d7df-r4bd9\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.216243 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/03e70640-9aec-465b-9141-7944e2a7aeb1-combined-ca-bundle\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.218839 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-tbrl8"] Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.221546 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w72c6\" (UniqueName: \"kubernetes.io/projected/03e70640-9aec-465b-9141-7944e2a7aeb1-kube-api-access-w72c6\") pod \"ovn-controller-metrics-7zlfd\" (UID: \"03e70640-9aec-465b-9141-7944e2a7aeb1\") " pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.278449 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/24bec749-145e-48db-b5f8-ae7b57a5aaa3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.278974 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.279200 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.279368 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.279550 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-config\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.279682 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-774dj\" (UniqueName: \"kubernetes.io/projected/24bec749-145e-48db-b5f8-ae7b57a5aaa3-kube-api-access-774dj\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.279772 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24bec749-145e-48db-b5f8-ae7b57a5aaa3-scripts\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.279946 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.280050 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24bec749-145e-48db-b5f8-ae7b57a5aaa3-config\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.280131 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.280217 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kqf5\" (UniqueName: \"kubernetes.io/projected/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-kube-api-access-4kqf5\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.280322 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.309161 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.319233 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-7zlfd" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383018 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24bec749-145e-48db-b5f8-ae7b57a5aaa3-config\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383190 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383277 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kqf5\" (UniqueName: \"kubernetes.io/projected/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-kube-api-access-4kqf5\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383375 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383536 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/24bec749-145e-48db-b5f8-ae7b57a5aaa3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383627 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383735 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383827 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.383959 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-config\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " 
pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.384069 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-774dj\" (UniqueName: \"kubernetes.io/projected/24bec749-145e-48db-b5f8-ae7b57a5aaa3-kube-api-access-774dj\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.384147 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24bec749-145e-48db-b5f8-ae7b57a5aaa3-scripts\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.384272 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.385266 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.386244 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-config\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.386636 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.387397 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.388534 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/24bec749-145e-48db-b5f8-ae7b57a5aaa3-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.389176 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/24bec749-145e-48db-b5f8-ae7b57a5aaa3-scripts\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.390355 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.390567 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24bec749-145e-48db-b5f8-ae7b57a5aaa3-config\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.391415 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.391727 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/24bec749-145e-48db-b5f8-ae7b57a5aaa3-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.411828 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-774dj\" (UniqueName: \"kubernetes.io/projected/24bec749-145e-48db-b5f8-ae7b57a5aaa3-kube-api-access-774dj\") pod \"ovn-northd-0\" (UID: \"24bec749-145e-48db-b5f8-ae7b57a5aaa3\") " pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.412789 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kqf5\" (UniqueName: \"kubernetes.io/projected/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-kube-api-access-4kqf5\") pod \"dnsmasq-dns-86db49b7ff-tbrl8\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.487976 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.581693 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.597036 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerName="dnsmasq-dns" containerID="cri-o://2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4" gracePeriod=10 Feb 27 08:49:02 crc kubenswrapper[4906]: I0227 08:49:02.597631 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerName="dnsmasq-dns" containerID="cri-o://5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77" gracePeriod=10 Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.183284 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.318525 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-dns-svc\") pod \"d1bda1b5-8590-4e63-9676-25f6c58f072d\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.318624 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9npr\" (UniqueName: \"kubernetes.io/projected/d1bda1b5-8590-4e63-9676-25f6c58f072d-kube-api-access-t9npr\") pod \"d1bda1b5-8590-4e63-9676-25f6c58f072d\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.318843 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-config\") pod \"d1bda1b5-8590-4e63-9676-25f6c58f072d\" (UID: \"d1bda1b5-8590-4e63-9676-25f6c58f072d\") " Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.327726 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1bda1b5-8590-4e63-9676-25f6c58f072d-kube-api-access-t9npr" (OuterVolumeSpecName: "kube-api-access-t9npr") pod "d1bda1b5-8590-4e63-9676-25f6c58f072d" (UID: "d1bda1b5-8590-4e63-9676-25f6c58f072d"). InnerVolumeSpecName "kube-api-access-t9npr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.380816 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-7zlfd"] Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.383452 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d1bda1b5-8590-4e63-9676-25f6c58f072d" (UID: "d1bda1b5-8590-4e63-9676-25f6c58f072d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.428703 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.428849 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9npr\" (UniqueName: \"kubernetes.io/projected/d1bda1b5-8590-4e63-9676-25f6c58f072d-kube-api-access-t9npr\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.435279 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-config" (OuterVolumeSpecName: "config") pod "d1bda1b5-8590-4e63-9676-25f6c58f072d" (UID: "d1bda1b5-8590-4e63-9676-25f6c58f072d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.505827 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.536097 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1bda1b5-8590-4e63-9676-25f6c58f072d-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.547142 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-r4bd9"] Feb 27 08:49:03 crc kubenswrapper[4906]: W0227 08:49:03.550106 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9364b7c4_2498_4db2_b3ad_f4c229c03ee5.slice/crio-bf0e90e91a99c88c88e6f805275873c4b349234b4ffc29baa62b96796c50e7a3 WatchSource:0}: Error finding container bf0e90e91a99c88c88e6f805275873c4b349234b4ffc29baa62b96796c50e7a3: Status 404 returned error can't find the container with id bf0e90e91a99c88c88e6f805275873c4b349234b4ffc29baa62b96796c50e7a3 Feb 27 08:49:03 crc kubenswrapper[4906]: W0227 08:49:03.565988 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e02b99f_7f4c_43b3_9f00_422041bb0c7c.slice/crio-0d7010a86f36ed631fdb6ecd1bf1e8a9833ee0d709cc532d1e7a7fe8f9aa41a8 WatchSource:0}: Error finding container 0d7010a86f36ed631fdb6ecd1bf1e8a9833ee0d709cc532d1e7a7fe8f9aa41a8: Status 404 returned error can't find the container with id 0d7010a86f36ed631fdb6ecd1bf1e8a9833ee0d709cc532d1e7a7fe8f9aa41a8 Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.569012 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-tbrl8"] Feb 27 08:49:03 crc kubenswrapper[4906]: W0227 08:49:03.570039 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod24bec749_145e_48db_b5f8_ae7b57a5aaa3.slice/crio-05597bbd5b775d8e5ab2e0d770c2c85d57ba083321cc2fc20dc63ce3c0b475ec WatchSource:0}: Error finding container 05597bbd5b775d8e5ab2e0d770c2c85d57ba083321cc2fc20dc63ce3c0b475ec: Status 404 returned error can't find the container with id 05597bbd5b775d8e5ab2e0d770c2c85d57ba083321cc2fc20dc63ce3c0b475ec Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.581958 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.608630 4906 generic.go:334] "Generic (PLEG): container finished" podID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerID="5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77" exitCode=0 Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.608780 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.609015 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" event={"ID":"8b70ee3a-8626-4cee-b807-b9fe7aebac31","Type":"ContainerDied","Data":"5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.609064 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5ccc8479f9-4m2sq" event={"ID":"8b70ee3a-8626-4cee-b807-b9fe7aebac31","Type":"ContainerDied","Data":"827c0950abcd91bd1507b8137f4f24ad95643fa7dd6b94973ba82495c3adc652"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.609090 4906 scope.go:117] "RemoveContainer" containerID="5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.614497 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" event={"ID":"1e02b99f-7f4c-43b3-9f00-422041bb0c7c","Type":"ContainerStarted","Data":"0d7010a86f36ed631fdb6ecd1bf1e8a9833ee0d709cc532d1e7a7fe8f9aa41a8"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.618110 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" event={"ID":"9364b7c4-2498-4db2-b3ad-f4c229c03ee5","Type":"ContainerStarted","Data":"bf0e90e91a99c88c88e6f805275873c4b349234b4ffc29baa62b96796c50e7a3"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.620507 4906 generic.go:334] "Generic (PLEG): container finished" podID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerID="2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4" exitCode=0 Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.620600 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.620620 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" event={"ID":"d1bda1b5-8590-4e63-9676-25f6c58f072d","Type":"ContainerDied","Data":"2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.620683 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-nt4fx" event={"ID":"d1bda1b5-8590-4e63-9676-25f6c58f072d","Type":"ContainerDied","Data":"1ac97f3a57e64646005ead74ec2ac3056f71f313e339cfd50dbb1c0ab20a8ac8"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.623993 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-7zlfd" event={"ID":"03e70640-9aec-465b-9141-7944e2a7aeb1","Type":"ContainerStarted","Data":"f4eb48a0567bff5188229df378f7c48786d04e43bdcadd57188f4842573240d7"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.626793 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"24bec749-145e-48db-b5f8-ae7b57a5aaa3","Type":"ContainerStarted","Data":"05597bbd5b775d8e5ab2e0d770c2c85d57ba083321cc2fc20dc63ce3c0b475ec"} Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.637770 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-dns-svc\") pod \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.637872 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-config\") pod \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.637995 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f6qdn\" (UniqueName: \"kubernetes.io/projected/8b70ee3a-8626-4cee-b807-b9fe7aebac31-kube-api-access-f6qdn\") pod \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\" (UID: \"8b70ee3a-8626-4cee-b807-b9fe7aebac31\") " Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.644625 4906 scope.go:117] "RemoveContainer" containerID="457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.645310 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b70ee3a-8626-4cee-b807-b9fe7aebac31-kube-api-access-f6qdn" (OuterVolumeSpecName: "kube-api-access-f6qdn") pod "8b70ee3a-8626-4cee-b807-b9fe7aebac31" (UID: "8b70ee3a-8626-4cee-b807-b9fe7aebac31"). InnerVolumeSpecName "kube-api-access-f6qdn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.696965 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nt4fx"] Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.700312 4906 scope.go:117] "RemoveContainer" containerID="5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77" Feb 27 08:49:03 crc kubenswrapper[4906]: E0227 08:49:03.700934 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77\": container with ID starting with 5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77 not found: ID does not exist" containerID="5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.700975 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77"} err="failed to get container status \"5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77\": rpc error: code = NotFound desc = could not find container \"5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77\": container with ID starting with 5ada162db631d2362f8bf4c1373fe65c23dd355ea9f3a74b0fadcdf24baffa77 not found: ID does not exist" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.701002 4906 scope.go:117] "RemoveContainer" containerID="457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368" Feb 27 08:49:03 crc kubenswrapper[4906]: E0227 08:49:03.701330 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368\": container with ID starting with 457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368 not found: ID does not exist" containerID="457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.701359 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368"} err="failed to get container status \"457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368\": rpc error: code = NotFound desc = could not find container \"457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368\": container with ID starting with 457e9cd41f01d1409cd6bc1eba65e39cbf9ab4bf9582318a65b6641d936a9368 not found: ID does not exist" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.701378 4906 scope.go:117] "RemoveContainer" containerID="2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.705502 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-nt4fx"] Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.710864 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-config" (OuterVolumeSpecName: "config") pod "8b70ee3a-8626-4cee-b807-b9fe7aebac31" (UID: "8b70ee3a-8626-4cee-b807-b9fe7aebac31"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.721778 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b70ee3a-8626-4cee-b807-b9fe7aebac31" (UID: "8b70ee3a-8626-4cee-b807-b9fe7aebac31"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.723802 4906 scope.go:117] "RemoveContainer" containerID="30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.740084 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.740124 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f6qdn\" (UniqueName: \"kubernetes.io/projected/8b70ee3a-8626-4cee-b807-b9fe7aebac31-kube-api-access-f6qdn\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.740136 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b70ee3a-8626-4cee-b807-b9fe7aebac31-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.744899 4906 scope.go:117] "RemoveContainer" containerID="2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4" Feb 27 08:49:03 crc kubenswrapper[4906]: E0227 08:49:03.745514 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4\": container with ID starting with 2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4 not found: ID does not exist" containerID="2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.745554 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4"} err="failed to get container status \"2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4\": rpc error: code = NotFound desc = could not find container \"2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4\": container with ID starting with 2e580d0a3fa866789da378af5bba839931ce3563775e2727ee0a88ab54ee6aa4 not found: ID does not exist" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.745582 4906 scope.go:117] "RemoveContainer" containerID="30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5" Feb 27 08:49:03 crc kubenswrapper[4906]: E0227 08:49:03.746470 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5\": container with ID starting with 30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5 not found: ID does not exist" containerID="30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.746509 4906 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5"} err="failed to get container status \"30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5\": rpc error: code = NotFound desc = could not find container \"30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5\": container with ID starting with 30c93dfad1f536284e584668e868f4877f4460d87fdc59b92ccd77dabcb12db5 not found: ID does not exist" Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.946201 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-4m2sq"] Feb 27 08:49:03 crc kubenswrapper[4906]: I0227 08:49:03.953382 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5ccc8479f9-4m2sq"] Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.078586 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-nbf48"] Feb 27 08:49:04 crc kubenswrapper[4906]: E0227 08:49:04.079343 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerName="init" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.079415 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerName="init" Feb 27 08:49:04 crc kubenswrapper[4906]: E0227 08:49:04.079521 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerName="dnsmasq-dns" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.079577 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerName="dnsmasq-dns" Feb 27 08:49:04 crc kubenswrapper[4906]: E0227 08:49:04.079658 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerName="init" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.079722 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerName="init" Feb 27 08:49:04 crc kubenswrapper[4906]: E0227 08:49:04.079784 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerName="dnsmasq-dns" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.079836 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerName="dnsmasq-dns" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.080064 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" containerName="dnsmasq-dns" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.080138 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" containerName="dnsmasq-dns" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.080809 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.083998 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.103916 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-nbf48"] Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.254992 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19570524-db36-4ef4-893a-fc108043dd6d-operator-scripts\") pod \"root-account-create-update-nbf48\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.255095 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sxkxr\" (UniqueName: \"kubernetes.io/projected/19570524-db36-4ef4-893a-fc108043dd6d-kube-api-access-sxkxr\") pod \"root-account-create-update-nbf48\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.357067 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19570524-db36-4ef4-893a-fc108043dd6d-operator-scripts\") pod \"root-account-create-update-nbf48\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.358259 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sxkxr\" (UniqueName: \"kubernetes.io/projected/19570524-db36-4ef4-893a-fc108043dd6d-kube-api-access-sxkxr\") pod \"root-account-create-update-nbf48\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.358091 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19570524-db36-4ef4-893a-fc108043dd6d-operator-scripts\") pod \"root-account-create-update-nbf48\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.381744 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sxkxr\" (UniqueName: \"kubernetes.io/projected/19570524-db36-4ef4-893a-fc108043dd6d-kube-api-access-sxkxr\") pod \"root-account-create-update-nbf48\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.400791 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.572950 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b70ee3a-8626-4cee-b807-b9fe7aebac31" path="/var/lib/kubelet/pods/8b70ee3a-8626-4cee-b807-b9fe7aebac31/volumes" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.573819 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1bda1b5-8590-4e63-9676-25f6c58f072d" path="/var/lib/kubelet/pods/d1bda1b5-8590-4e63-9676-25f6c58f072d/volumes" Feb 27 08:49:04 crc kubenswrapper[4906]: I0227 08:49:04.682901 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-nbf48"] Feb 27 08:49:05 crc kubenswrapper[4906]: I0227 08:49:05.647861 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-nbf48" event={"ID":"19570524-db36-4ef4-893a-fc108043dd6d","Type":"ContainerStarted","Data":"db3713a0f58d86ec8631dc79cc2ca8319f9f1642980df166d08fff2b46c7ec1b"} Feb 27 08:49:05 crc kubenswrapper[4906]: I0227 08:49:05.650391 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" event={"ID":"1e02b99f-7f4c-43b3-9f00-422041bb0c7c","Type":"ContainerStarted","Data":"fafc5c0c404d230ec5c81ae8728858aac2545af5a042343a16927d12242dd4d7"} Feb 27 08:49:06 crc kubenswrapper[4906]: I0227 08:49:06.633961 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Feb 27 08:49:06 crc kubenswrapper[4906]: I0227 08:49:06.634380 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Feb 27 08:49:06 crc kubenswrapper[4906]: I0227 08:49:06.665914 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" event={"ID":"9364b7c4-2498-4db2-b3ad-f4c229c03ee5","Type":"ContainerStarted","Data":"1359b9fd2473e4346d8388f31e95a9d81023cae2cce741aed2d418b50df9ebc3"} Feb 27 08:49:06 crc kubenswrapper[4906]: I0227 08:49:06.668535 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-7zlfd" event={"ID":"03e70640-9aec-465b-9141-7944e2a7aeb1","Type":"ContainerStarted","Data":"988a0a057b7bbd4319aa132763b66a0f082c1f1404d2ff52a5d1c02b5fe1978c"} Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.060674 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.258993 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-zgnvf"] Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.260123 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.269429 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zgnvf"] Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.361119 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-2a5c-account-create-update-vfl7s"] Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.362308 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.366117 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.372593 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2a5c-account-create-update-vfl7s"] Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.410462 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsh9x\" (UniqueName: \"kubernetes.io/projected/b44567c9-a11c-4760-8ee3-43d4c96fa180-kube-api-access-gsh9x\") pod \"glance-db-create-zgnvf\" (UID: \"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.410540 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b44567c9-a11c-4760-8ee3-43d4c96fa180-operator-scripts\") pod \"glance-db-create-zgnvf\" (UID: \"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.512496 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69ae1e99-afff-4a95-bc00-d1891e12976d-operator-scripts\") pod \"glance-2a5c-account-create-update-vfl7s\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.512577 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wshwl\" (UniqueName: \"kubernetes.io/projected/69ae1e99-afff-4a95-bc00-d1891e12976d-kube-api-access-wshwl\") pod \"glance-2a5c-account-create-update-vfl7s\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.512913 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsh9x\" (UniqueName: \"kubernetes.io/projected/b44567c9-a11c-4760-8ee3-43d4c96fa180-kube-api-access-gsh9x\") pod \"glance-db-create-zgnvf\" (UID: \"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.513059 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b44567c9-a11c-4760-8ee3-43d4c96fa180-operator-scripts\") pod \"glance-db-create-zgnvf\" (UID: \"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.514135 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b44567c9-a11c-4760-8ee3-43d4c96fa180-operator-scripts\") pod \"glance-db-create-zgnvf\" (UID: \"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.556940 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsh9x\" (UniqueName: \"kubernetes.io/projected/b44567c9-a11c-4760-8ee3-43d4c96fa180-kube-api-access-gsh9x\") pod \"glance-db-create-zgnvf\" (UID: 
\"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.582922 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.614935 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wshwl\" (UniqueName: \"kubernetes.io/projected/69ae1e99-afff-4a95-bc00-d1891e12976d-kube-api-access-wshwl\") pod \"glance-2a5c-account-create-update-vfl7s\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.615148 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69ae1e99-afff-4a95-bc00-d1891e12976d-operator-scripts\") pod \"glance-2a5c-account-create-update-vfl7s\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.616095 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69ae1e99-afff-4a95-bc00-d1891e12976d-operator-scripts\") pod \"glance-2a5c-account-create-update-vfl7s\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.660723 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wshwl\" (UniqueName: \"kubernetes.io/projected/69ae1e99-afff-4a95-bc00-d1891e12976d-kube-api-access-wshwl\") pod \"glance-2a5c-account-create-update-vfl7s\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.683744 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.709831 4906 generic.go:334] "Generic (PLEG): container finished" podID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerID="fafc5c0c404d230ec5c81ae8728858aac2545af5a042343a16927d12242dd4d7" exitCode=0 Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.709908 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" event={"ID":"1e02b99f-7f4c-43b3-9f00-422041bb0c7c","Type":"ContainerDied","Data":"fafc5c0c404d230ec5c81ae8728858aac2545af5a042343a16927d12242dd4d7"} Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.931866 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-w76h9"] Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.934229 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:07 crc kubenswrapper[4906]: I0227 08:49:07.944250 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-w76h9"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:07.995744 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bb99-account-create-update-ppmpk"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:07.996832 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.000841 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.010641 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bb99-account-create-update-ppmpk"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.028274 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq7ll\" (UniqueName: \"kubernetes.io/projected/4d108c06-2f21-4a05-87ab-caccb04415b5-kube-api-access-bq7ll\") pod \"keystone-db-create-w76h9\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.030296 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d108c06-2f21-4a05-87ab-caccb04415b5-operator-scripts\") pod \"keystone-db-create-w76h9\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.132458 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26a00786-e81f-488f-88fe-ffc98c7f63e2-operator-scripts\") pod \"keystone-bb99-account-create-update-ppmpk\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.132539 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2hng\" (UniqueName: \"kubernetes.io/projected/26a00786-e81f-488f-88fe-ffc98c7f63e2-kube-api-access-h2hng\") pod \"keystone-bb99-account-create-update-ppmpk\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.132594 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq7ll\" (UniqueName: \"kubernetes.io/projected/4d108c06-2f21-4a05-87ab-caccb04415b5-kube-api-access-bq7ll\") pod \"keystone-db-create-w76h9\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.132626 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d108c06-2f21-4a05-87ab-caccb04415b5-operator-scripts\") pod \"keystone-db-create-w76h9\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.133744 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d108c06-2f21-4a05-87ab-caccb04415b5-operator-scripts\") pod \"keystone-db-create-w76h9\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.168528 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-bx2nz"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.170182 4906 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.173994 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq7ll\" (UniqueName: \"kubernetes.io/projected/4d108c06-2f21-4a05-87ab-caccb04415b5-kube-api-access-bq7ll\") pod \"keystone-db-create-w76h9\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.180810 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bx2nz"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.234252 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26a00786-e81f-488f-88fe-ffc98c7f63e2-operator-scripts\") pod \"keystone-bb99-account-create-update-ppmpk\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.234309 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2hng\" (UniqueName: \"kubernetes.io/projected/26a00786-e81f-488f-88fe-ffc98c7f63e2-kube-api-access-h2hng\") pod \"keystone-bb99-account-create-update-ppmpk\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.235416 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26a00786-e81f-488f-88fe-ffc98c7f63e2-operator-scripts\") pod \"keystone-bb99-account-create-update-ppmpk\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.251327 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-zgnvf"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.257103 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2hng\" (UniqueName: \"kubernetes.io/projected/26a00786-e81f-488f-88fe-ffc98c7f63e2-kube-api-access-h2hng\") pod \"keystone-bb99-account-create-update-ppmpk\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.263450 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.292643 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-1cea-account-create-update-s8q8n"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.294316 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.307163 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1cea-account-create-update-s8q8n"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.313495 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.318117 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-2a5c-account-create-update-vfl7s"] Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.328646 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.338333 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-operator-scripts\") pod \"placement-db-create-bx2nz\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.338429 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64lpp\" (UniqueName: \"kubernetes.io/projected/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-kube-api-access-64lpp\") pod \"placement-db-create-bx2nz\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: W0227 08:49:08.343269 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod69ae1e99_afff_4a95_bc00_d1891e12976d.slice/crio-331f24560fe44105d5d8785f2b7106c5c1fb3409f6d015731e99977667e33422 WatchSource:0}: Error finding container 331f24560fe44105d5d8785f2b7106c5c1fb3409f6d015731e99977667e33422: Status 404 returned error can't find the container with id 331f24560fe44105d5d8785f2b7106c5c1fb3409f6d015731e99977667e33422 Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.440050 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1fe6339-ed29-4e33-a161-12b6c001a7e3-operator-scripts\") pod \"placement-1cea-account-create-update-s8q8n\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.440127 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-operator-scripts\") pod \"placement-db-create-bx2nz\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.440184 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64lpp\" (UniqueName: \"kubernetes.io/projected/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-kube-api-access-64lpp\") pod \"placement-db-create-bx2nz\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.440259 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-dbchc\" (UniqueName: \"kubernetes.io/projected/c1fe6339-ed29-4e33-a161-12b6c001a7e3-kube-api-access-dbchc\") pod \"placement-1cea-account-create-update-s8q8n\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.441287 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-operator-scripts\") pod \"placement-db-create-bx2nz\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.473418 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64lpp\" (UniqueName: \"kubernetes.io/projected/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-kube-api-access-64lpp\") pod \"placement-db-create-bx2nz\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.496909 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.541331 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dbchc\" (UniqueName: \"kubernetes.io/projected/c1fe6339-ed29-4e33-a161-12b6c001a7e3-kube-api-access-dbchc\") pod \"placement-1cea-account-create-update-s8q8n\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.541567 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1fe6339-ed29-4e33-a161-12b6c001a7e3-operator-scripts\") pod \"placement-1cea-account-create-update-s8q8n\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.542525 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1fe6339-ed29-4e33-a161-12b6c001a7e3-operator-scripts\") pod \"placement-1cea-account-create-update-s8q8n\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.561453 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dbchc\" (UniqueName: \"kubernetes.io/projected/c1fe6339-ed29-4e33-a161-12b6c001a7e3-kube-api-access-dbchc\") pod \"placement-1cea-account-create-update-s8q8n\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.618066 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.649162 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-w76h9"] Feb 27 08:49:08 crc kubenswrapper[4906]: W0227 08:49:08.710680 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d108c06_2f21_4a05_87ab_caccb04415b5.slice/crio-dd2c81cef735ed9837b906d3d93a2f8362198f520114f557c999c636381cd601 WatchSource:0}: Error finding container dd2c81cef735ed9837b906d3d93a2f8362198f520114f557c999c636381cd601: Status 404 returned error can't find the container with id dd2c81cef735ed9837b906d3d93a2f8362198f520114f557c999c636381cd601 Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.730640 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-nbf48" event={"ID":"19570524-db36-4ef4-893a-fc108043dd6d","Type":"ContainerStarted","Data":"043eef2a73f282b18bd7c046ef10a369c35da12a4fa980643dcecde7b720cba1"} Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.734550 4906 generic.go:334] "Generic (PLEG): container finished" podID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerID="1359b9fd2473e4346d8388f31e95a9d81023cae2cce741aed2d418b50df9ebc3" exitCode=0 Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.734628 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" event={"ID":"9364b7c4-2498-4db2-b3ad-f4c229c03ee5","Type":"ContainerDied","Data":"1359b9fd2473e4346d8388f31e95a9d81023cae2cce741aed2d418b50df9ebc3"} Feb 27 08:49:08 crc kubenswrapper[4906]: I0227 08:49:08.741327 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2a5c-account-create-update-vfl7s" event={"ID":"69ae1e99-afff-4a95-bc00-d1891e12976d","Type":"ContainerStarted","Data":"331f24560fe44105d5d8785f2b7106c5c1fb3409f6d015731e99977667e33422"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:08.766895 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zgnvf" event={"ID":"b44567c9-a11c-4760-8ee3-43d4c96fa180","Type":"ContainerStarted","Data":"e3a15a3d858ab496b4f46aa934a4f19e57bb12265a396e63002f826bb5ad0db5"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:08.808011 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-7zlfd" podStartSLOduration=7.807981771 podStartE2EDuration="7.807981771s" podCreationTimestamp="2026-02-27 08:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:08.803854123 +0000 UTC m=+1247.198255733" watchObservedRunningTime="2026-02-27 08:49:08.807981771 +0000 UTC m=+1247.202383381" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:08.920136 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bb99-account-create-update-ppmpk"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.476531 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-r4bd9"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.507586 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-jdnzx"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.510217 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.527844 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-jdnzx"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.671351 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk762\" (UniqueName: \"kubernetes.io/projected/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-kube-api-access-mk762\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.671447 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-dns-svc\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.671564 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.671906 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.672013 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-config\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.773140 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.773237 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-config\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.774367 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-config\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.774443 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.774469 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk762\" (UniqueName: \"kubernetes.io/projected/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-kube-api-access-mk762\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.774645 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-dns-svc\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.774691 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.775633 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.776010 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-dns-svc\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.778124 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zgnvf" event={"ID":"b44567c9-a11c-4760-8ee3-43d4c96fa180","Type":"ContainerStarted","Data":"27300d3014c44c93b3e33ddff36683a17ed53775e270bf2a9fa6521738b5f303"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.780412 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-w76h9" event={"ID":"4d108c06-2f21-4a05-87ab-caccb04415b5","Type":"ContainerStarted","Data":"dd2c81cef735ed9837b906d3d93a2f8362198f520114f557c999c636381cd601"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.782476 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2a5c-account-create-update-vfl7s" event={"ID":"69ae1e99-afff-4a95-bc00-d1891e12976d","Type":"ContainerStarted","Data":"cc88132b664db7e01dd9c0d0a568257692b261fa19a6d9b1d02e77ef161c86b8"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.803667 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk762\" (UniqueName: \"kubernetes.io/projected/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-kube-api-access-mk762\") pod \"dnsmasq-dns-698758b865-jdnzx\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.826298 4906 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-nbf48" podStartSLOduration=5.826276356 podStartE2EDuration="5.826276356s" podCreationTimestamp="2026-02-27 08:49:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:09.82567865 +0000 UTC m=+1248.220080260" watchObservedRunningTime="2026-02-27 08:49:09.826276356 +0000 UTC m=+1248.220677956" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.826836 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-create-zgnvf" podStartSLOduration=2.82683204 podStartE2EDuration="2.82683204s" podCreationTimestamp="2026-02-27 08:49:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:09.795730344 +0000 UTC m=+1248.190131954" watchObservedRunningTime="2026-02-27 08:49:09.82683204 +0000 UTC m=+1248.221233650" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.839161 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:09.861365 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-2a5c-account-create-update-vfl7s" podStartSLOduration=2.861341766 podStartE2EDuration="2.861341766s" podCreationTimestamp="2026-02-27 08:49:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:09.854480176 +0000 UTC m=+1248.248881786" watchObservedRunningTime="2026-02-27 08:49:09.861341766 +0000 UTC m=+1248.255743376" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.617494 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.631062 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.642718 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.643126 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.643339 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.643392 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.646687 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-ddmbz" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.802865 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.802939 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98486bd-1325-4072-bce0-a28d38ecead2-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.802983 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.803019 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5m7lb\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-kube-api-access-5m7lb\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.803041 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c98486bd-1325-4072-bce0-a28d38ecead2-cache\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.803059 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c98486bd-1325-4072-bce0-a28d38ecead2-lock\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.905340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.905391 4906 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98486bd-1325-4072-bce0-a28d38ecead2-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.905443 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.905489 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5m7lb\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-kube-api-access-5m7lb\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.905508 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c98486bd-1325-4072-bce0-a28d38ecead2-cache\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.905530 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c98486bd-1325-4072-bce0-a28d38ecead2-lock\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:10.906430 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:10.906475 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.906514 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/c98486bd-1325-4072-bce0-a28d38ecead2-lock\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:10.906557 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. No retries permitted until 2026-02-27 08:49:11.406531677 +0000 UTC m=+1249.800933287 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.906679 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/c98486bd-1325-4072-bce0-a28d38ecead2-cache\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.906546 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.919854 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c98486bd-1325-4072-bce0-a28d38ecead2-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.927849 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5m7lb\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-kube-api-access-5m7lb\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:10.940877 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.100164 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-6jk9r"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.101321 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.104025 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.104472 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.104805 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.120273 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-6jk9r"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.211757 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db586541-2471-4a37-a7b6-3c8f324a696b-etc-swift\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.211848 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hpk8\" (UniqueName: \"kubernetes.io/projected/db586541-2471-4a37-a7b6-3c8f324a696b-kube-api-access-5hpk8\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.212102 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-dispersionconf\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.212307 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-combined-ca-bundle\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.212488 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-ring-data-devices\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.212522 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-scripts\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.212664 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-swiftconf\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 
08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.314675 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-dispersionconf\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.314753 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-combined-ca-bundle\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.314798 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-scripts\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.314814 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-ring-data-devices\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.314836 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-swiftconf\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.314874 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db586541-2471-4a37-a7b6-3c8f324a696b-etc-swift\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.315386 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hpk8\" (UniqueName: \"kubernetes.io/projected/db586541-2471-4a37-a7b6-3c8f324a696b-kube-api-access-5hpk8\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.315543 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db586541-2471-4a37-a7b6-3c8f324a696b-etc-swift\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.315917 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-scripts\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.316105 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-ring-data-devices\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.318404 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-swiftconf\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.318779 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-dispersionconf\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.318834 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-combined-ca-bundle\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.335029 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hpk8\" (UniqueName: \"kubernetes.io/projected/db586541-2471-4a37-a7b6-3c8f324a696b-kube-api-access-5hpk8\") pod \"swift-ring-rebalance-6jk9r\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.417601 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:11.417848 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:11.417910 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:11.417985 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. No retries permitted until 2026-02-27 08:49:12.417964113 +0000 UTC m=+1250.812365723 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:11.463485 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:12.436062 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:12.436376 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:12.436643 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:12.436719 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. No retries permitted until 2026-02-27 08:49:14.436697289 +0000 UTC m=+1252.831098909 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:12.806304 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bb99-account-create-update-ppmpk" event={"ID":"26a00786-e81f-488f-88fe-ffc98c7f63e2","Type":"ContainerStarted","Data":"c524f9b749d57c28f17f29e11e903f8e5260431a4f53131270748de0ffebc39b"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:13.774509 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/placement-operator-controller-manager-648564c9fc-p8w7q" podUID="39722076-5ed2-4e53-bb1d-d2a8bc73b825" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.76:8081/readyz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:13.814647 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-w76h9" event={"ID":"4d108c06-2f21-4a05-87ab-caccb04415b5","Type":"ContainerStarted","Data":"169913695a138bd02677d1e20fa011f154144a9f4deab2e47ead527cf5681537"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:14.474218 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:14.474496 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:14.474548 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:14.474637 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. 
No retries permitted until 2026-02-27 08:49:18.474609352 +0000 UTC m=+1256.869010982 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:15.861208 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-w76h9" podStartSLOduration=8.861185564 podStartE2EDuration="8.861185564s" podCreationTimestamp="2026-02-27 08:49:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:15.857326663 +0000 UTC m=+1254.251728273" watchObservedRunningTime="2026-02-27 08:49:15.861185564 +0000 UTC m=+1254.255587174" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:16.848740 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5","Type":"ContainerStarted","Data":"5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:17.861181 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bb99-account-create-update-ppmpk" event={"ID":"26a00786-e81f-488f-88fe-ffc98c7f63e2","Type":"ContainerStarted","Data":"f09d019ee4bc3645d19f3ec00bdfeeec67de0399d70f87e08cd0a02989271a7b"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:17.865185 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" event={"ID":"1e02b99f-7f4c-43b3-9f00-422041bb0c7c","Type":"ContainerStarted","Data":"234b3f11147ea18a2437c76efbb855fdc97859c64374fbd4ea8763696877e28f"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:17.865483 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:17.870174 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" event={"ID":"9364b7c4-2498-4db2-b3ad-f4c229c03ee5","Type":"ContainerStarted","Data":"74c3f95f2d92431194a750815d9afa2bc66508b3ce5e66441512a2d7b7f75a29"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:17.870411 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:17.902949 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" podStartSLOduration=15.902913928 podStartE2EDuration="15.902913928s" podCreationTimestamp="2026-02-27 08:49:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:17.887171345 +0000 UTC m=+1256.281572955" watchObservedRunningTime="2026-02-27 08:49:17.902913928 +0000 UTC m=+1256.297315538" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:17.924873 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=10.108381182 podStartE2EDuration="1m9.924842854s" podCreationTimestamp="2026-02-27 08:48:08 +0000 UTC" firstStartedPulling="2026-02-27 08:48:14.956221646 +0000 UTC m=+1193.350623266" lastFinishedPulling="2026-02-27 08:49:14.772683328 +0000 UTC 
m=+1253.167084938" observedRunningTime="2026-02-27 08:49:17.918589119 +0000 UTC m=+1256.312990729" watchObservedRunningTime="2026-02-27 08:49:17.924842854 +0000 UTC m=+1256.319244464" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:18.553058 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:18.553279 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:18.553304 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:18.553375 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. No retries permitted until 2026-02-27 08:49:26.553357444 +0000 UTC m=+1264.947759054 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:18.890263 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerName="dnsmasq-dns" containerID="cri-o://74c3f95f2d92431194a750815d9afa2bc66508b3ce5e66441512a2d7b7f75a29" gracePeriod=10 Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:18.891707 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:18.925644 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" podStartSLOduration=17.925618467 podStartE2EDuration="17.925618467s" podCreationTimestamp="2026-02-27 08:49:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:18.916093347 +0000 UTC m=+1257.310494977" watchObservedRunningTime="2026-02-27 08:49:18.925618467 +0000 UTC m=+1257.320020067" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:18.948600 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bb99-account-create-update-ppmpk" podStartSLOduration=11.948569259 podStartE2EDuration="11.948569259s" podCreationTimestamp="2026-02-27 08:49:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:18.935100366 +0000 UTC m=+1257.329501996" watchObservedRunningTime="2026-02-27 08:49:18.948569259 +0000 UTC m=+1257.342970859" Feb 27 08:49:23 crc kubenswrapper[4906]: E0227 08:49:19.802640 4906 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9364b7c4_2498_4db2_b3ad_f4c229c03ee5.slice/crio-conmon-74c3f95f2d92431194a750815d9afa2bc66508b3ce5e66441512a2d7b7f75a29.scope\": RecentStats: unable to find data in memory cache]" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:19.900639 4906 generic.go:334] "Generic (PLEG): container finished" podID="b44567c9-a11c-4760-8ee3-43d4c96fa180" containerID="27300d3014c44c93b3e33ddff36683a17ed53775e270bf2a9fa6521738b5f303" exitCode=0 Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:19.900730 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zgnvf" event={"ID":"b44567c9-a11c-4760-8ee3-43d4c96fa180","Type":"ContainerDied","Data":"27300d3014c44c93b3e33ddff36683a17ed53775e270bf2a9fa6521738b5f303"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:19.904164 4906 generic.go:334] "Generic (PLEG): container finished" podID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerID="74c3f95f2d92431194a750815d9afa2bc66508b3ce5e66441512a2d7b7f75a29" exitCode=0 Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:19.904192 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" event={"ID":"9364b7c4-2498-4db2-b3ad-f4c229c03ee5","Type":"ContainerDied","Data":"74c3f95f2d92431194a750815d9afa2bc66508b3ce5e66441512a2d7b7f75a29"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:20.917742 4906 generic.go:334] "Generic (PLEG): container finished" podID="752c995e-5d01-4705-ab26-be06da61290d" containerID="6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690" exitCode=0 Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:20.918153 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"752c995e-5d01-4705-ab26-be06da61290d","Type":"ContainerDied","Data":"6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:22.312150 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.112:5353: connect: connection refused" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:22.602121 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:22.812788 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-67lpp" podUID="bd97ec8d-9d8f-4817-9770-d5392d4f60df" containerName="ovn-controller" probeResult="failure" output=< Feb 27 08:49:23 crc kubenswrapper[4906]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 27 08:49:23 crc kubenswrapper[4906]: > Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:22.936113 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"752c995e-5d01-4705-ab26-be06da61290d","Type":"ContainerStarted","Data":"c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.521286 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.549986 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.599936 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-jdnzx"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.620101 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-6jk9r"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.628262 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.631985 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-1cea-account-create-update-s8q8n"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.639062 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-bx2nz"] Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.668426 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gsh9x\" (UniqueName: \"kubernetes.io/projected/b44567c9-a11c-4760-8ee3-43d4c96fa180-kube-api-access-gsh9x\") pod \"b44567c9-a11c-4760-8ee3-43d4c96fa180\" (UID: \"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.668541 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-dns-svc\") pod \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.668677 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjxvj\" (UniqueName: \"kubernetes.io/projected/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-kube-api-access-cjxvj\") pod \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.668806 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-ovsdbserver-nb\") pod \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.668839 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b44567c9-a11c-4760-8ee3-43d4c96fa180-operator-scripts\") pod \"b44567c9-a11c-4760-8ee3-43d4c96fa180\" (UID: \"b44567c9-a11c-4760-8ee3-43d4c96fa180\") " Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.668912 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-config\") pod \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\" (UID: \"9364b7c4-2498-4db2-b3ad-f4c229c03ee5\") " Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.673180 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b44567c9-a11c-4760-8ee3-43d4c96fa180-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b44567c9-a11c-4760-8ee3-43d4c96fa180" (UID: "b44567c9-a11c-4760-8ee3-43d4c96fa180"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.678057 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-kube-api-access-cjxvj" (OuterVolumeSpecName: "kube-api-access-cjxvj") pod "9364b7c4-2498-4db2-b3ad-f4c229c03ee5" (UID: "9364b7c4-2498-4db2-b3ad-f4c229c03ee5"). InnerVolumeSpecName "kube-api-access-cjxvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.681837 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b44567c9-a11c-4760-8ee3-43d4c96fa180-kube-api-access-gsh9x" (OuterVolumeSpecName: "kube-api-access-gsh9x") pod "b44567c9-a11c-4760-8ee3-43d4c96fa180" (UID: "b44567c9-a11c-4760-8ee3-43d4c96fa180"). InnerVolumeSpecName "kube-api-access-gsh9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.720803 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9364b7c4-2498-4db2-b3ad-f4c229c03ee5" (UID: "9364b7c4-2498-4db2-b3ad-f4c229c03ee5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.727947 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-config" (OuterVolumeSpecName: "config") pod "9364b7c4-2498-4db2-b3ad-f4c229c03ee5" (UID: "9364b7c4-2498-4db2-b3ad-f4c229c03ee5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.728825 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9364b7c4-2498-4db2-b3ad-f4c229c03ee5" (UID: "9364b7c4-2498-4db2-b3ad-f4c229c03ee5"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.778855 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.778948 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b44567c9-a11c-4760-8ee3-43d4c96fa180-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.778972 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.778988 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gsh9x\" (UniqueName: \"kubernetes.io/projected/b44567c9-a11c-4760-8ee3-43d4c96fa180-kube-api-access-gsh9x\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.779002 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.779018 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjxvj\" (UniqueName: \"kubernetes.io/projected/9364b7c4-2498-4db2-b3ad-f4c229c03ee5-kube-api-access-cjxvj\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.949330 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-6jk9r" event={"ID":"db586541-2471-4a37-a7b6-3c8f324a696b","Type":"ContainerStarted","Data":"4167f53294c9406f0bf868cfedf142ed259d77488664892db3868186655908ef"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.955440 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" event={"ID":"9364b7c4-2498-4db2-b3ad-f4c229c03ee5","Type":"ContainerDied","Data":"bf0e90e91a99c88c88e6f805275873c4b349234b4ffc29baa62b96796c50e7a3"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.955534 4906 scope.go:117] "RemoveContainer" containerID="74c3f95f2d92431194a750815d9afa2bc66508b3ce5e66441512a2d7b7f75a29" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.955762 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-r4bd9" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.961420 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1cea-account-create-update-s8q8n" event={"ID":"c1fe6339-ed29-4e33-a161-12b6c001a7e3","Type":"ContainerStarted","Data":"2f0d0c0e1f68a08c611958871dc056681c00a8fc93a6a0e8c3440fa054ab704a"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.965811 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-jdnzx" event={"ID":"a0cd4824-5d93-46a6-bb94-95a74d8eaeba","Type":"ContainerStarted","Data":"ee35b25216807ae78c06b997d40adda8382b4f8358bc509838aeeee63f129d0b"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.968549 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-zgnvf" event={"ID":"b44567c9-a11c-4760-8ee3-43d4c96fa180","Type":"ContainerDied","Data":"e3a15a3d858ab496b4f46aa934a4f19e57bb12265a396e63002f826bb5ad0db5"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.968609 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e3a15a3d858ab496b4f46aa934a4f19e57bb12265a396e63002f826bb5ad0db5" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.968603 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-zgnvf" Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.972911 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bx2nz" event={"ID":"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7","Type":"ContainerStarted","Data":"85d43647150d59dc9b11b1fd23e8d04ea072bc7d662441063184183c45865f04"} Feb 27 08:49:23 crc kubenswrapper[4906]: I0227 08:49:23.973061 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.005351 4906 scope.go:117] "RemoveContainer" containerID="1359b9fd2473e4346d8388f31e95a9d81023cae2cce741aed2d418b50df9ebc3" Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.021103 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=40.291774011 podStartE2EDuration="1m22.021072422s" podCreationTimestamp="2026-02-27 08:48:02 +0000 UTC" firstStartedPulling="2026-02-27 08:48:04.741993375 +0000 UTC m=+1183.136394985" lastFinishedPulling="2026-02-27 08:48:46.471291786 +0000 UTC m=+1224.865693396" observedRunningTime="2026-02-27 08:49:24.005333989 +0000 UTC m=+1262.399735819" watchObservedRunningTime="2026-02-27 08:49:24.021072422 +0000 UTC m=+1262.415474052" Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.045429 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-r4bd9"] Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.053865 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-r4bd9"] Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.564162 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" path="/var/lib/kubelet/pods/9364b7c4-2498-4db2-b3ad-f4c229c03ee5/volumes" Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.985865 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1cea-account-create-update-s8q8n" 
event={"ID":"c1fe6339-ed29-4e33-a161-12b6c001a7e3","Type":"ContainerStarted","Data":"8d87d3c3796598f91e5eba295fbc2c4f9f24fa68398c63042d8ec5bc4400fbf1"} Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.992476 4906 generic.go:334] "Generic (PLEG): container finished" podID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerID="747541d750590e9a263f47272ccb6729e4ca0a5e89779a9084477c8b6d9e9ec1" exitCode=0 Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.992583 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-jdnzx" event={"ID":"a0cd4824-5d93-46a6-bb94-95a74d8eaeba","Type":"ContainerDied","Data":"747541d750590e9a263f47272ccb6729e4ca0a5e89779a9084477c8b6d9e9ec1"} Feb 27 08:49:24 crc kubenswrapper[4906]: I0227 08:49:24.996625 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bx2nz" event={"ID":"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7","Type":"ContainerStarted","Data":"11e7587ebabfb6f857126ea2c87788cfa2f8da5ab1ce6d8dcddcd1749aaa076e"} Feb 27 08:49:25 crc kubenswrapper[4906]: I0227 08:49:25.019981 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-1cea-account-create-update-s8q8n" podStartSLOduration=17.019952866 podStartE2EDuration="17.019952866s" podCreationTimestamp="2026-02-27 08:49:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:25.005416195 +0000 UTC m=+1263.399817805" watchObservedRunningTime="2026-02-27 08:49:25.019952866 +0000 UTC m=+1263.414354486" Feb 27 08:49:25 crc kubenswrapper[4906]: I0227 08:49:25.031576 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-bx2nz" podStartSLOduration=17.031545411 podStartE2EDuration="17.031545411s" podCreationTimestamp="2026-02-27 08:49:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:25.021673981 +0000 UTC m=+1263.416075601" watchObservedRunningTime="2026-02-27 08:49:25.031545411 +0000 UTC m=+1263.425947021" Feb 27 08:49:26 crc kubenswrapper[4906]: I0227 08:49:26.361267 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Feb 27 08:49:26 crc kubenswrapper[4906]: I0227 08:49:26.456313 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="960971c6-e3d1-458e-9991-91cbcbeb9d5e" containerName="galera" probeResult="failure" output=< Feb 27 08:49:26 crc kubenswrapper[4906]: wsrep_local_state_comment (Joined) differs from Synced Feb 27 08:49:26 crc kubenswrapper[4906]: > Feb 27 08:49:26 crc kubenswrapper[4906]: I0227 08:49:26.639547 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:26 crc kubenswrapper[4906]: E0227 08:49:26.640736 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:49:26 crc kubenswrapper[4906]: E0227 08:49:26.640785 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:49:26 crc kubenswrapper[4906]: E0227 
08:49:26.640837 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. No retries permitted until 2026-02-27 08:49:42.6408178 +0000 UTC m=+1281.035219410 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:49:26 crc kubenswrapper[4906]: I0227 08:49:26.729554 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/openstack-cell1-galera-0" podUID="960971c6-e3d1-458e-9991-91cbcbeb9d5e" containerName="galera" probeResult="failure" output=< Feb 27 08:49:26 crc kubenswrapper[4906]: wsrep_local_state_comment (Joined) differs from Synced Feb 27 08:49:26 crc kubenswrapper[4906]: > Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.017579 4906 generic.go:334] "Generic (PLEG): container finished" podID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerID="648e3776fe4d7a47348f9297e5b57825d7f70d4d2e766a296dfac824e916eb9b" exitCode=0 Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.017639 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"288d23ef-ae52-4275-a827-ebf77b2823ea","Type":"ContainerDied","Data":"648e3776fe4d7a47348f9297e5b57825d7f70d4d2e766a296dfac824e916eb9b"} Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.636396 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-67lpp" podUID="bd97ec8d-9d8f-4817-9770-d5392d4f60df" containerName="ovn-controller" probeResult="failure" output=< Feb 27 08:49:27 crc kubenswrapper[4906]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Feb 27 08:49:27 crc kubenswrapper[4906]: > Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.673857 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.677375 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-h6fvw" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.945340 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-67lpp-config-hpln9"] Feb 27 08:49:27 crc kubenswrapper[4906]: E0227 08:49:27.946368 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b44567c9-a11c-4760-8ee3-43d4c96fa180" containerName="mariadb-database-create" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.946402 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b44567c9-a11c-4760-8ee3-43d4c96fa180" containerName="mariadb-database-create" Feb 27 08:49:27 crc kubenswrapper[4906]: E0227 08:49:27.946423 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerName="init" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.946429 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerName="init" Feb 27 08:49:27 crc kubenswrapper[4906]: E0227 08:49:27.946441 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerName="dnsmasq-dns" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.946448 4906 
state_mem.go:107] "Deleted CPUSet assignment" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerName="dnsmasq-dns" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.946667 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="9364b7c4-2498-4db2-b3ad-f4c229c03ee5" containerName="dnsmasq-dns" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.946685 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="b44567c9-a11c-4760-8ee3-43d4c96fa180" containerName="mariadb-database-create" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.947423 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.951176 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.966074 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.966254 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run-ovn\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.966292 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zpv5\" (UniqueName: \"kubernetes.io/projected/1ee4e121-47a7-4bfa-93ed-d12e521a7238-kube-api-access-9zpv5\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.966325 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-log-ovn\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.966377 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-scripts\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.966400 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-additional-scripts\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:27 crc kubenswrapper[4906]: I0227 08:49:27.966560 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67lpp-config-hpln9"] 
Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.028360 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"288d23ef-ae52-4275-a827-ebf77b2823ea","Type":"ContainerStarted","Data":"372fbf4be4107581f65f344e2a64dfbaa7aba41eff73f9f1bda2902cf736d938"} Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.030072 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-jdnzx" event={"ID":"a0cd4824-5d93-46a6-bb94-95a74d8eaeba","Type":"ContainerStarted","Data":"ffbe1d16da29543e957d74d620229ed94f6508e5e376f5472869610979f02ab3"} Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.068821 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run-ovn\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.068898 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zpv5\" (UniqueName: \"kubernetes.io/projected/1ee4e121-47a7-4bfa-93ed-d12e521a7238-kube-api-access-9zpv5\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.068937 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-log-ovn\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.068991 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-scripts\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.069015 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-additional-scripts\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.069095 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.069147 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run-ovn\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.070853 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-additional-scripts\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.071051 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-scripts\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.071105 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-log-ovn\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.071148 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.102406 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zpv5\" (UniqueName: \"kubernetes.io/projected/1ee4e121-47a7-4bfa-93ed-d12e521a7238-kube-api-access-9zpv5\") pod \"ovn-controller-67lpp-config-hpln9\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.271854 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:28 crc kubenswrapper[4906]: I0227 08:49:28.793379 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67lpp-config-hpln9"] Feb 27 08:49:28 crc kubenswrapper[4906]: W0227 08:49:28.794064 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ee4e121_47a7_4bfa_93ed_d12e521a7238.slice/crio-660db3133a670fb76ee27186f594d2470925e57c0e4e55a7193ce4609111e533 WatchSource:0}: Error finding container 660db3133a670fb76ee27186f594d2470925e57c0e4e55a7193ce4609111e533: Status 404 returned error can't find the container with id 660db3133a670fb76ee27186f594d2470925e57c0e4e55a7193ce4609111e533 Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.045930 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"24bec749-145e-48db-b5f8-ae7b57a5aaa3","Type":"ContainerStarted","Data":"90a0f7a62218d60693cee0e64e4e2c920084974d77909c5e78659e7f3ee6dee6"} Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.046448 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"24bec749-145e-48db-b5f8-ae7b57a5aaa3","Type":"ContainerStarted","Data":"39acab4ad39d2ef37b0edddfad84d5a4374261584dd1a4f7c21773f151ac2da6"} Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.046472 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.048300 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-hpln9" event={"ID":"1ee4e121-47a7-4bfa-93ed-d12e521a7238","Type":"ContainerStarted","Data":"660db3133a670fb76ee27186f594d2470925e57c0e4e55a7193ce4609111e533"} Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.048355 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.048767 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.074486 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=3.030983562 podStartE2EDuration="27.074467603s" podCreationTimestamp="2026-02-27 08:49:02 +0000 UTC" firstStartedPulling="2026-02-27 08:49:03.574605876 +0000 UTC m=+1241.969007486" lastFinishedPulling="2026-02-27 08:49:27.618089917 +0000 UTC m=+1266.012491527" observedRunningTime="2026-02-27 08:49:29.072674545 +0000 UTC m=+1267.467076155" watchObservedRunningTime="2026-02-27 08:49:29.074467603 +0000 UTC m=+1267.468869213" Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.131840 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=-9223371950.72296 podStartE2EDuration="1m26.131816648s" podCreationTimestamp="2026-02-27 08:48:03 +0000 UTC" firstStartedPulling="2026-02-27 08:48:05.101283298 +0000 UTC m=+1183.495684908" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:29.12806133 +0000 UTC m=+1267.522462960" watchObservedRunningTime="2026-02-27 08:49:29.131816648 +0000 UTC m=+1267.526218258" Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.150777 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/dnsmasq-dns-698758b865-jdnzx" podStartSLOduration=20.150751075 podStartE2EDuration="20.150751075s" podCreationTimestamp="2026-02-27 08:49:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:29.146775041 +0000 UTC m=+1267.541176651" watchObservedRunningTime="2026-02-27 08:49:29.150751075 +0000 UTC m=+1267.545152685" Feb 27 08:49:29 crc kubenswrapper[4906]: I0227 08:49:29.377594 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 27 08:49:30 crc kubenswrapper[4906]: I0227 08:49:30.060762 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-hpln9" event={"ID":"1ee4e121-47a7-4bfa-93ed-d12e521a7238","Type":"ContainerStarted","Data":"c90b582ab85e0cdce4e8a8b87c0e89e34ac759cafeb76062c99ead1c13a7a5ef"} Feb 27 08:49:30 crc kubenswrapper[4906]: I0227 08:49:30.092897 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-67lpp-config-hpln9" podStartSLOduration=3.092861158 podStartE2EDuration="3.092861158s" podCreationTimestamp="2026-02-27 08:49:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:30.089365377 +0000 UTC m=+1268.483766977" watchObservedRunningTime="2026-02-27 08:49:30.092861158 +0000 UTC m=+1268.487262768" Feb 27 08:49:31 crc kubenswrapper[4906]: I0227 08:49:31.071276 4906 generic.go:334] "Generic (PLEG): container finished" podID="4d108c06-2f21-4a05-87ab-caccb04415b5" containerID="169913695a138bd02677d1e20fa011f154144a9f4deab2e47ead527cf5681537" exitCode=0 Feb 27 08:49:31 crc kubenswrapper[4906]: I0227 08:49:31.071382 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-w76h9" event={"ID":"4d108c06-2f21-4a05-87ab-caccb04415b5","Type":"ContainerDied","Data":"169913695a138bd02677d1e20fa011f154144a9f4deab2e47ead527cf5681537"} Feb 27 08:49:31 crc kubenswrapper[4906]: I0227 08:49:31.076127 4906 generic.go:334] "Generic (PLEG): container finished" podID="19570524-db36-4ef4-893a-fc108043dd6d" containerID="043eef2a73f282b18bd7c046ef10a369c35da12a4fa980643dcecde7b720cba1" exitCode=0 Feb 27 08:49:31 crc kubenswrapper[4906]: I0227 08:49:31.076201 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-nbf48" event={"ID":"19570524-db36-4ef4-893a-fc108043dd6d","Type":"ContainerDied","Data":"043eef2a73f282b18bd7c046ef10a369c35da12a4fa980643dcecde7b720cba1"} Feb 27 08:49:31 crc kubenswrapper[4906]: I0227 08:49:31.078148 4906 generic.go:334] "Generic (PLEG): container finished" podID="1ee4e121-47a7-4bfa-93ed-d12e521a7238" containerID="c90b582ab85e0cdce4e8a8b87c0e89e34ac759cafeb76062c99ead1c13a7a5ef" exitCode=0 Feb 27 08:49:31 crc kubenswrapper[4906]: I0227 08:49:31.078181 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-hpln9" event={"ID":"1ee4e121-47a7-4bfa-93ed-d12e521a7238","Type":"ContainerDied","Data":"c90b582ab85e0cdce4e8a8b87c0e89e34ac759cafeb76062c99ead1c13a7a5ef"} Feb 27 08:49:32 crc kubenswrapper[4906]: I0227 08:49:32.613614 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-67lpp" Feb 27 08:49:33 crc kubenswrapper[4906]: I0227 08:49:33.110958 4906 generic.go:334] "Generic (PLEG): container finished" podID="0eca1824-e2a4-4bac-bc2d-8e2035dae7c7" 
containerID="11e7587ebabfb6f857126ea2c87788cfa2f8da5ab1ce6d8dcddcd1749aaa076e" exitCode=0 Feb 27 08:49:33 crc kubenswrapper[4906]: I0227 08:49:33.111018 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bx2nz" event={"ID":"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7","Type":"ContainerDied","Data":"11e7587ebabfb6f857126ea2c87788cfa2f8da5ab1ce6d8dcddcd1749aaa076e"} Feb 27 08:49:33 crc kubenswrapper[4906]: I0227 08:49:33.114924 4906 generic.go:334] "Generic (PLEG): container finished" podID="69ae1e99-afff-4a95-bc00-d1891e12976d" containerID="cc88132b664db7e01dd9c0d0a568257692b261fa19a6d9b1d02e77ef161c86b8" exitCode=0 Feb 27 08:49:33 crc kubenswrapper[4906]: I0227 08:49:33.115113 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2a5c-account-create-update-vfl7s" event={"ID":"69ae1e99-afff-4a95-bc00-d1891e12976d","Type":"ContainerDied","Data":"cc88132b664db7e01dd9c0d0a568257692b261fa19a6d9b1d02e77ef161c86b8"} Feb 27 08:49:34 crc kubenswrapper[4906]: I0227 08:49:34.173706 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Feb 27 08:49:34 crc kubenswrapper[4906]: I0227 08:49:34.841272 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:49:34 crc kubenswrapper[4906]: I0227 08:49:34.961833 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-tbrl8"] Feb 27 08:49:34 crc kubenswrapper[4906]: I0227 08:49:34.967737 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerName="dnsmasq-dns" containerID="cri-o://234b3f11147ea18a2437c76efbb855fdc97859c64374fbd4ea8763696877e28f" gracePeriod=10 Feb 27 08:49:35 crc kubenswrapper[4906]: I0227 08:49:35.135520 4906 generic.go:334] "Generic (PLEG): container finished" podID="26a00786-e81f-488f-88fe-ffc98c7f63e2" containerID="f09d019ee4bc3645d19f3ec00bdfeeec67de0399d70f87e08cd0a02989271a7b" exitCode=0 Feb 27 08:49:35 crc kubenswrapper[4906]: I0227 08:49:35.135577 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bb99-account-create-update-ppmpk" event={"ID":"26a00786-e81f-488f-88fe-ffc98c7f63e2","Type":"ContainerDied","Data":"f09d019ee4bc3645d19f3ec00bdfeeec67de0399d70f87e08cd0a02989271a7b"} Feb 27 08:49:36 crc kubenswrapper[4906]: I0227 08:49:36.149034 4906 generic.go:334] "Generic (PLEG): container finished" podID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerID="234b3f11147ea18a2437c76efbb855fdc97859c64374fbd4ea8763696877e28f" exitCode=0 Feb 27 08:49:36 crc kubenswrapper[4906]: I0227 08:49:36.149117 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" event={"ID":"1e02b99f-7f4c-43b3-9f00-422041bb0c7c","Type":"ContainerDied","Data":"234b3f11147ea18a2437c76efbb855fdc97859c64374fbd4ea8763696877e28f"} Feb 27 08:49:36 crc kubenswrapper[4906]: I0227 08:49:36.153938 4906 generic.go:334] "Generic (PLEG): container finished" podID="c1fe6339-ed29-4e33-a161-12b6c001a7e3" containerID="8d87d3c3796598f91e5eba295fbc2c4f9f24fa68398c63042d8ec5bc4400fbf1" exitCode=0 Feb 27 08:49:36 crc kubenswrapper[4906]: I0227 08:49:36.154025 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/placement-1cea-account-create-update-s8q8n" event={"ID":"c1fe6339-ed29-4e33-a161-12b6c001a7e3","Type":"ContainerDied","Data":"8d87d3c3796598f91e5eba295fbc2c4f9f24fa68398c63042d8ec5bc4400fbf1"} Feb 27 08:49:36 crc kubenswrapper[4906]: I0227 08:49:36.709494 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Feb 27 08:49:37 crc kubenswrapper[4906]: I0227 08:49:37.582745 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.115:5353: connect: connection refused" Feb 27 08:49:40 crc kubenswrapper[4906]: E0227 08:49:40.225216 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified" Feb 27 08:49:40 crc kubenswrapper[4906]: E0227 08:49:40.226025 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:swift-ring-rebalance,Image:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,Command:[/usr/local/bin/swift-ring-tool all],Args:[],WorkingDir:/etc/swift,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CM_NAME,Value:swift-ring-files,ValueFrom:nil,},EnvVar{Name:NAMESPACE,Value:openstack,ValueFrom:nil,},EnvVar{Name:OWNER_APIVERSION,Value:swift.openstack.org/v1beta1,ValueFrom:nil,},EnvVar{Name:OWNER_KIND,Value:SwiftRing,ValueFrom:nil,},EnvVar{Name:OWNER_NAME,Value:swift-ring,ValueFrom:nil,},EnvVar{Name:OWNER_UID,Value:4428db25-625e-4e79-9a1f-b0820fd9ad19,ValueFrom:nil,},EnvVar{Name:SWIFT_MIN_PART_HOURS,Value:1,ValueFrom:nil,},EnvVar{Name:SWIFT_PART_POWER,Value:10,ValueFrom:nil,},EnvVar{Name:SWIFT_REPLICAS,Value:1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/swift-ring-tool,SubPath:swift-ring-tool,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:swiftconf,ReadOnly:true,MountPath:/etc/swift/swift.conf,SubPath:swift.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-swift,ReadOnly:false,MountPath:/etc/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ring-data-devices,ReadOnly:true,MountPath:/var/lib/config-data/ring-devices,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dispersionconf,ReadOnly:true,MountPath:/etc/swift/dispersion.conf,SubPath:dispersion.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5hpk8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42445,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,S
eccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-ring-rebalance-6jk9r_openstack(db586541-2471-4a37-a7b6-3c8f324a696b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:49:40 crc kubenswrapper[4906]: E0227 08:49:40.227261 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/swift-ring-rebalance-6jk9r" podUID="db586541-2471-4a37-a7b6-3c8f324a696b" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.344660 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.353562 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.358028 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.368098 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.385678 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.402806 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.414149 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.443639 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26a00786-e81f-488f-88fe-ffc98c7f63e2-operator-scripts\") pod \"26a00786-e81f-488f-88fe-ffc98c7f63e2\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.443685 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d108c06-2f21-4a05-87ab-caccb04415b5-operator-scripts\") pod \"4d108c06-2f21-4a05-87ab-caccb04415b5\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.443761 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sxkxr\" (UniqueName: \"kubernetes.io/projected/19570524-db36-4ef4-893a-fc108043dd6d-kube-api-access-sxkxr\") pod \"19570524-db36-4ef4-893a-fc108043dd6d\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.443820 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h2hng\" (UniqueName: \"kubernetes.io/projected/26a00786-e81f-488f-88fe-ffc98c7f63e2-kube-api-access-h2hng\") pod \"26a00786-e81f-488f-88fe-ffc98c7f63e2\" (UID: \"26a00786-e81f-488f-88fe-ffc98c7f63e2\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.443846 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq7ll\" (UniqueName: \"kubernetes.io/projected/4d108c06-2f21-4a05-87ab-caccb04415b5-kube-api-access-bq7ll\") pod \"4d108c06-2f21-4a05-87ab-caccb04415b5\" (UID: \"4d108c06-2f21-4a05-87ab-caccb04415b5\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.444066 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19570524-db36-4ef4-893a-fc108043dd6d-operator-scripts\") pod \"19570524-db36-4ef4-893a-fc108043dd6d\" (UID: \"19570524-db36-4ef4-893a-fc108043dd6d\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.445642 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/26a00786-e81f-488f-88fe-ffc98c7f63e2-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "26a00786-e81f-488f-88fe-ffc98c7f63e2" (UID: "26a00786-e81f-488f-88fe-ffc98c7f63e2"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.446943 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4d108c06-2f21-4a05-87ab-caccb04415b5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4d108c06-2f21-4a05-87ab-caccb04415b5" (UID: "4d108c06-2f21-4a05-87ab-caccb04415b5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.454711 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19570524-db36-4ef4-893a-fc108043dd6d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "19570524-db36-4ef4-893a-fc108043dd6d" (UID: "19570524-db36-4ef4-893a-fc108043dd6d"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.475044 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19570524-db36-4ef4-893a-fc108043dd6d-kube-api-access-sxkxr" (OuterVolumeSpecName: "kube-api-access-sxkxr") pod "19570524-db36-4ef4-893a-fc108043dd6d" (UID: "19570524-db36-4ef4-893a-fc108043dd6d"). InnerVolumeSpecName "kube-api-access-sxkxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.475166 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/26a00786-e81f-488f-88fe-ffc98c7f63e2-kube-api-access-h2hng" (OuterVolumeSpecName: "kube-api-access-h2hng") pod "26a00786-e81f-488f-88fe-ffc98c7f63e2" (UID: "26a00786-e81f-488f-88fe-ffc98c7f63e2"). InnerVolumeSpecName "kube-api-access-h2hng". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.475233 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4d108c06-2f21-4a05-87ab-caccb04415b5-kube-api-access-bq7ll" (OuterVolumeSpecName: "kube-api-access-bq7ll") pod "4d108c06-2f21-4a05-87ab-caccb04415b5" (UID: "4d108c06-2f21-4a05-87ab-caccb04415b5"). InnerVolumeSpecName "kube-api-access-bq7ll". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.546465 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-operator-scripts\") pod \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547081 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0eca1824-e2a4-4bac-bc2d-8e2035dae7c7" (UID: "0eca1824-e2a4-4bac-bc2d-8e2035dae7c7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547193 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run-ovn\") pod \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547224 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "1ee4e121-47a7-4bfa-93ed-d12e521a7238" (UID: "1ee4e121-47a7-4bfa-93ed-d12e521a7238"). InnerVolumeSpecName "var-run-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547265 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wshwl\" (UniqueName: \"kubernetes.io/projected/69ae1e99-afff-4a95-bc00-d1891e12976d-kube-api-access-wshwl\") pod \"69ae1e99-afff-4a95-bc00-d1891e12976d\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547309 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-scripts\") pod \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547342 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-additional-scripts\") pod \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547360 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1fe6339-ed29-4e33-a161-12b6c001a7e3-operator-scripts\") pod \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547438 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zpv5\" (UniqueName: \"kubernetes.io/projected/1ee4e121-47a7-4bfa-93ed-d12e521a7238-kube-api-access-9zpv5\") pod \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547543 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run\") pod \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547563 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69ae1e99-afff-4a95-bc00-d1891e12976d-operator-scripts\") pod \"69ae1e99-afff-4a95-bc00-d1891e12976d\" (UID: \"69ae1e99-afff-4a95-bc00-d1891e12976d\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547579 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-log-ovn\") pod \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\" (UID: \"1ee4e121-47a7-4bfa-93ed-d12e521a7238\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547602 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbchc\" (UniqueName: \"kubernetes.io/projected/c1fe6339-ed29-4e33-a161-12b6c001a7e3-kube-api-access-dbchc\") pod \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\" (UID: \"c1fe6339-ed29-4e33-a161-12b6c001a7e3\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.547623 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-64lpp\" (UniqueName: \"kubernetes.io/projected/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-kube-api-access-64lpp\") pod 
\"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\" (UID: \"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548313 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/19570524-db36-4ef4-893a-fc108043dd6d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548336 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/26a00786-e81f-488f-88fe-ffc98c7f63e2-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548350 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4d108c06-2f21-4a05-87ab-caccb04415b5-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548361 4906 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548372 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sxkxr\" (UniqueName: \"kubernetes.io/projected/19570524-db36-4ef4-893a-fc108043dd6d-kube-api-access-sxkxr\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548385 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h2hng\" (UniqueName: \"kubernetes.io/projected/26a00786-e81f-488f-88fe-ffc98c7f63e2-kube-api-access-h2hng\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548394 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq7ll\" (UniqueName: \"kubernetes.io/projected/4d108c06-2f21-4a05-87ab-caccb04415b5-kube-api-access-bq7ll\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548537 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c1fe6339-ed29-4e33-a161-12b6c001a7e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c1fe6339-ed29-4e33-a161-12b6c001a7e3" (UID: "c1fe6339-ed29-4e33-a161-12b6c001a7e3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.548706 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "1ee4e121-47a7-4bfa-93ed-d12e521a7238" (UID: "1ee4e121-47a7-4bfa-93ed-d12e521a7238"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.549080 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run" (OuterVolumeSpecName: "var-run") pod "1ee4e121-47a7-4bfa-93ed-d12e521a7238" (UID: "1ee4e121-47a7-4bfa-93ed-d12e521a7238"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.549958 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "1ee4e121-47a7-4bfa-93ed-d12e521a7238" (UID: "1ee4e121-47a7-4bfa-93ed-d12e521a7238"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.550083 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-scripts" (OuterVolumeSpecName: "scripts") pod "1ee4e121-47a7-4bfa-93ed-d12e521a7238" (UID: "1ee4e121-47a7-4bfa-93ed-d12e521a7238"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.551896 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/69ae1e99-afff-4a95-bc00-d1891e12976d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "69ae1e99-afff-4a95-bc00-d1891e12976d" (UID: "69ae1e99-afff-4a95-bc00-d1891e12976d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.568063 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ee4e121-47a7-4bfa-93ed-d12e521a7238-kube-api-access-9zpv5" (OuterVolumeSpecName: "kube-api-access-9zpv5") pod "1ee4e121-47a7-4bfa-93ed-d12e521a7238" (UID: "1ee4e121-47a7-4bfa-93ed-d12e521a7238"). InnerVolumeSpecName "kube-api-access-9zpv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.570236 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1fe6339-ed29-4e33-a161-12b6c001a7e3-kube-api-access-dbchc" (OuterVolumeSpecName: "kube-api-access-dbchc") pod "c1fe6339-ed29-4e33-a161-12b6c001a7e3" (UID: "c1fe6339-ed29-4e33-a161-12b6c001a7e3"). InnerVolumeSpecName "kube-api-access-dbchc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.570363 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-kube-api-access-64lpp" (OuterVolumeSpecName: "kube-api-access-64lpp") pod "0eca1824-e2a4-4bac-bc2d-8e2035dae7c7" (UID: "0eca1824-e2a4-4bac-bc2d-8e2035dae7c7"). InnerVolumeSpecName "kube-api-access-64lpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.572745 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69ae1e99-afff-4a95-bc00-d1891e12976d-kube-api-access-wshwl" (OuterVolumeSpecName: "kube-api-access-wshwl") pod "69ae1e99-afff-4a95-bc00-d1891e12976d" (UID: "69ae1e99-afff-4a95-bc00-d1891e12976d"). InnerVolumeSpecName "kube-api-access-wshwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.616528 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.649951 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zpv5\" (UniqueName: \"kubernetes.io/projected/1ee4e121-47a7-4bfa-93ed-d12e521a7238-kube-api-access-9zpv5\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650550 4906 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-run\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650569 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/69ae1e99-afff-4a95-bc00-d1891e12976d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650584 4906 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/1ee4e121-47a7-4bfa-93ed-d12e521a7238-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650598 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbchc\" (UniqueName: \"kubernetes.io/projected/c1fe6339-ed29-4e33-a161-12b6c001a7e3-kube-api-access-dbchc\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650629 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-64lpp\" (UniqueName: \"kubernetes.io/projected/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-kube-api-access-64lpp\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650641 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650649 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wshwl\" (UniqueName: \"kubernetes.io/projected/69ae1e99-afff-4a95-bc00-d1891e12976d-kube-api-access-wshwl\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650658 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650665 4906 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/1ee4e121-47a7-4bfa-93ed-d12e521a7238-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.650673 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c1fe6339-ed29-4e33-a161-12b6c001a7e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.752015 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-sb\") pod \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.752160 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-config\") pod \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.752187 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-dns-svc\") pod \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.752222 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-nb\") pod \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.752337 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kqf5\" (UniqueName: \"kubernetes.io/projected/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-kube-api-access-4kqf5\") pod \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\" (UID: \"1e02b99f-7f4c-43b3-9f00-422041bb0c7c\") " Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.768699 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-kube-api-access-4kqf5" (OuterVolumeSpecName: "kube-api-access-4kqf5") pod "1e02b99f-7f4c-43b3-9f00-422041bb0c7c" (UID: "1e02b99f-7f4c-43b3-9f00-422041bb0c7c"). InnerVolumeSpecName "kube-api-access-4kqf5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.798108 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1e02b99f-7f4c-43b3-9f00-422041bb0c7c" (UID: "1e02b99f-7f4c-43b3-9f00-422041bb0c7c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.806901 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-config" (OuterVolumeSpecName: "config") pod "1e02b99f-7f4c-43b3-9f00-422041bb0c7c" (UID: "1e02b99f-7f4c-43b3-9f00-422041bb0c7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.807738 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1e02b99f-7f4c-43b3-9f00-422041bb0c7c" (UID: "1e02b99f-7f4c-43b3-9f00-422041bb0c7c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.816004 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1e02b99f-7f4c-43b3-9f00-422041bb0c7c" (UID: "1e02b99f-7f4c-43b3-9f00-422041bb0c7c"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.854893 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.854934 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.854947 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.854955 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:40 crc kubenswrapper[4906]: I0227 08:49:40.854965 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kqf5\" (UniqueName: \"kubernetes.io/projected/1e02b99f-7f4c-43b3-9f00-422041bb0c7c-kube-api-access-4kqf5\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.219608 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" event={"ID":"1e02b99f-7f4c-43b3-9f00-422041bb0c7c","Type":"ContainerDied","Data":"0d7010a86f36ed631fdb6ecd1bf1e8a9833ee0d709cc532d1e7a7fe8f9aa41a8"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.219688 4906 scope.go:117] "RemoveContainer" containerID="234b3f11147ea18a2437c76efbb855fdc97859c64374fbd4ea8763696877e28f" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.219632 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-tbrl8" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.222258 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-1cea-account-create-update-s8q8n" event={"ID":"c1fe6339-ed29-4e33-a161-12b6c001a7e3","Type":"ContainerDied","Data":"2f0d0c0e1f68a08c611958871dc056681c00a8fc93a6a0e8c3440fa054ab704a"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.222315 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f0d0c0e1f68a08c611958871dc056681c00a8fc93a6a0e8c3440fa054ab704a" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.222390 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-1cea-account-create-update-s8q8n" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.224511 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-2a5c-account-create-update-vfl7s" event={"ID":"69ae1e99-afff-4a95-bc00-d1891e12976d","Type":"ContainerDied","Data":"331f24560fe44105d5d8785f2b7106c5c1fb3409f6d015731e99977667e33422"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.224571 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="331f24560fe44105d5d8785f2b7106c5c1fb3409f6d015731e99977667e33422" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.224679 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-2a5c-account-create-update-vfl7s" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.228184 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-hpln9" event={"ID":"1ee4e121-47a7-4bfa-93ed-d12e521a7238","Type":"ContainerDied","Data":"660db3133a670fb76ee27186f594d2470925e57c0e4e55a7193ce4609111e533"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.228248 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="660db3133a670fb76ee27186f594d2470925e57c0e4e55a7193ce4609111e533" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.228347 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp-config-hpln9" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.231738 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-bx2nz" event={"ID":"0eca1824-e2a4-4bac-bc2d-8e2035dae7c7","Type":"ContainerDied","Data":"85d43647150d59dc9b11b1fd23e8d04ea072bc7d662441063184183c45865f04"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.232597 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="85d43647150d59dc9b11b1fd23e8d04ea072bc7d662441063184183c45865f04" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.232082 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-bx2nz" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.235829 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-nbf48" event={"ID":"19570524-db36-4ef4-893a-fc108043dd6d","Type":"ContainerDied","Data":"db3713a0f58d86ec8631dc79cc2ca8319f9f1642980df166d08fff2b46c7ec1b"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.235956 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db3713a0f58d86ec8631dc79cc2ca8319f9f1642980df166d08fff2b46c7ec1b" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.235910 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-nbf48" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.244991 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-w76h9" event={"ID":"4d108c06-2f21-4a05-87ab-caccb04415b5","Type":"ContainerDied","Data":"dd2c81cef735ed9837b906d3d93a2f8362198f520114f557c999c636381cd601"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.245082 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd2c81cef735ed9837b906d3d93a2f8362198f520114f557c999c636381cd601" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.245019 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-w76h9" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.248165 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bb99-account-create-update-ppmpk" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.248191 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bb99-account-create-update-ppmpk" event={"ID":"26a00786-e81f-488f-88fe-ffc98c7f63e2","Type":"ContainerDied","Data":"c524f9b749d57c28f17f29e11e903f8e5260431a4f53131270748de0ffebc39b"} Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.248220 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c524f9b749d57c28f17f29e11e903f8e5260431a4f53131270748de0ffebc39b" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.250645 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified\\\"\"" pod="openstack/swift-ring-rebalance-6jk9r" podUID="db586541-2471-4a37-a7b6-3c8f324a696b" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.284228 4906 scope.go:117] "RemoveContainer" containerID="fafc5c0c404d230ec5c81ae8728858aac2545af5a042343a16927d12242dd4d7" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.317827 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-tbrl8"] Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.325662 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-tbrl8"] Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.544429 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-67lpp-config-hpln9"] Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.551573 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-67lpp-config-hpln9"] Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641119 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-67lpp-config-5kmkv"] Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641526 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19570524-db36-4ef4-893a-fc108043dd6d" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641545 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="19570524-db36-4ef4-893a-fc108043dd6d" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641565 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="26a00786-e81f-488f-88fe-ffc98c7f63e2" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641574 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="26a00786-e81f-488f-88fe-ffc98c7f63e2" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641594 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4d108c06-2f21-4a05-87ab-caccb04415b5" containerName="mariadb-database-create" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641606 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4d108c06-2f21-4a05-87ab-caccb04415b5" containerName="mariadb-database-create" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641627 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerName="init" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 
08:49:41.641635 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerName="init" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641648 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69ae1e99-afff-4a95-bc00-d1891e12976d" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641657 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="69ae1e99-afff-4a95-bc00-d1891e12976d" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641667 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0eca1824-e2a4-4bac-bc2d-8e2035dae7c7" containerName="mariadb-database-create" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641674 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0eca1824-e2a4-4bac-bc2d-8e2035dae7c7" containerName="mariadb-database-create" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641685 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ee4e121-47a7-4bfa-93ed-d12e521a7238" containerName="ovn-config" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641693 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ee4e121-47a7-4bfa-93ed-d12e521a7238" containerName="ovn-config" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641705 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1fe6339-ed29-4e33-a161-12b6c001a7e3" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641713 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1fe6339-ed29-4e33-a161-12b6c001a7e3" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: E0227 08:49:41.641723 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerName="dnsmasq-dns" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641730 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerName="dnsmasq-dns" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641869 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="69ae1e99-afff-4a95-bc00-d1891e12976d" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641905 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="19570524-db36-4ef4-893a-fc108043dd6d" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641915 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0eca1824-e2a4-4bac-bc2d-8e2035dae7c7" containerName="mariadb-database-create" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641924 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1fe6339-ed29-4e33-a161-12b6c001a7e3" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641933 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ee4e121-47a7-4bfa-93ed-d12e521a7238" containerName="ovn-config" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641941 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="26a00786-e81f-488f-88fe-ffc98c7f63e2" containerName="mariadb-account-create-update" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641952 4906 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" containerName="dnsmasq-dns" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.641961 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="4d108c06-2f21-4a05-87ab-caccb04415b5" containerName="mariadb-database-create" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.642536 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.645459 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.654577 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67lpp-config-5kmkv"] Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.776241 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkkrg\" (UniqueName: \"kubernetes.io/projected/aceaa9f9-225f-4194-ae36-f82910ac4367-kube-api-access-lkkrg\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.776300 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.776635 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run-ovn\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.776706 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-additional-scripts\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.777025 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-log-ovn\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.777066 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-scripts\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879064 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: 
\"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-log-ovn\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879182 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-scripts\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879259 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkkrg\" (UniqueName: \"kubernetes.io/projected/aceaa9f9-225f-4194-ae36-f82910ac4367-kube-api-access-lkkrg\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879296 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879343 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run-ovn\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879364 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-additional-scripts\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879463 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-log-ovn\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879547 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.879899 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run-ovn\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.880291 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-additional-scripts\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.881451 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-scripts\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.907376 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkkrg\" (UniqueName: \"kubernetes.io/projected/aceaa9f9-225f-4194-ae36-f82910ac4367-kube-api-access-lkkrg\") pod \"ovn-controller-67lpp-config-5kmkv\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:41 crc kubenswrapper[4906]: I0227 08:49:41.973042 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.443526 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-67lpp-config-5kmkv"] Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.518301 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-sgch9"] Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.520172 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.523544 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rc48q" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.523628 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.587825 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1e02b99f-7f4c-43b3-9f00-422041bb0c7c" path="/var/lib/kubelet/pods/1e02b99f-7f4c-43b3-9f00-422041bb0c7c/volumes" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.596205 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ee4e121-47a7-4bfa-93ed-d12e521a7238" path="/var/lib/kubelet/pods/1ee4e121-47a7-4bfa-93ed-d12e521a7238/volumes" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.597246 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-sgch9"] Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.597329 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.697562 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-config-data\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.697613 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-db-sync-config-data\") pod \"glance-db-sync-sgch9\" 
(UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.698035 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-combined-ca-bundle\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.698505 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kk88q\" (UniqueName: \"kubernetes.io/projected/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-kube-api-access-kk88q\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.698563 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:49:42 crc kubenswrapper[4906]: E0227 08:49:42.698800 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:49:42 crc kubenswrapper[4906]: E0227 08:49:42.698824 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:49:42 crc kubenswrapper[4906]: E0227 08:49:42.698929 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. No retries permitted until 2026-02-27 08:50:14.698908364 +0000 UTC m=+1313.093309974 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.800547 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-combined-ca-bundle\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.800653 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kk88q\" (UniqueName: \"kubernetes.io/projected/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-kube-api-access-kk88q\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.800745 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-config-data\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.800768 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-db-sync-config-data\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.815237 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-db-sync-config-data\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.815426 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-combined-ca-bundle\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.817424 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-config-data\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.826352 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kk88q\" (UniqueName: \"kubernetes.io/projected/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-kube-api-access-kk88q\") pod \"glance-db-sync-sgch9\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:42 crc kubenswrapper[4906]: I0227 08:49:42.892795 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-sgch9" Feb 27 08:49:43 crc kubenswrapper[4906]: I0227 08:49:43.269780 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-5kmkv" event={"ID":"aceaa9f9-225f-4194-ae36-f82910ac4367","Type":"ContainerStarted","Data":"673a8338d5a8f6329be335e66d637dc9dc85411c9b14d49d5c5b8591c2c2d501"} Feb 27 08:49:43 crc kubenswrapper[4906]: I0227 08:49:43.270393 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-5kmkv" event={"ID":"aceaa9f9-225f-4194-ae36-f82910ac4367","Type":"ContainerStarted","Data":"9279352c1a56f9ff97bf270862e411b4b52d8a5f10a36d60ea128bc9fb186780"} Feb 27 08:49:43 crc kubenswrapper[4906]: I0227 08:49:43.628816 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-67lpp-config-5kmkv" podStartSLOduration=2.628781376 podStartE2EDuration="2.628781376s" podCreationTimestamp="2026-02-27 08:49:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:43.296465543 +0000 UTC m=+1281.690867153" watchObservedRunningTime="2026-02-27 08:49:43.628781376 +0000 UTC m=+1282.023182986" Feb 27 08:49:43 crc kubenswrapper[4906]: W0227 08:49:43.635461 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode2e1d22f_d9ac_467f_81f9_e4a4282f46aa.slice/crio-6913d318e7ba92353912b3906f9d06112d1a769806ff9604c46e6815e87d854f WatchSource:0}: Error finding container 6913d318e7ba92353912b3906f9d06112d1a769806ff9604c46e6815e87d854f: Status 404 returned error can't find the container with id 6913d318e7ba92353912b3906f9d06112d1a769806ff9604c46e6815e87d854f Feb 27 08:49:43 crc kubenswrapper[4906]: I0227 08:49:43.641156 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-sgch9"] Feb 27 08:49:44 crc kubenswrapper[4906]: I0227 08:49:44.173762 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:49:44 crc kubenswrapper[4906]: I0227 08:49:44.282002 4906 generic.go:334] "Generic (PLEG): container finished" podID="aceaa9f9-225f-4194-ae36-f82910ac4367" containerID="673a8338d5a8f6329be335e66d637dc9dc85411c9b14d49d5c5b8591c2c2d501" exitCode=0 Feb 27 08:49:44 crc kubenswrapper[4906]: I0227 08:49:44.282122 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-5kmkv" event={"ID":"aceaa9f9-225f-4194-ae36-f82910ac4367","Type":"ContainerDied","Data":"673a8338d5a8f6329be335e66d637dc9dc85411c9b14d49d5c5b8591c2c2d501"} Feb 27 08:49:44 crc kubenswrapper[4906]: I0227 08:49:44.283282 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-sgch9" event={"ID":"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa","Type":"ContainerStarted","Data":"6913d318e7ba92353912b3906f9d06112d1a769806ff9604c46e6815e87d854f"} Feb 27 08:49:44 crc kubenswrapper[4906]: I0227 08:49:44.509069 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.270465 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-nbf48"] Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.280750 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-nbf48"] Feb 27 08:49:45 
crc kubenswrapper[4906]: I0227 08:49:45.385217 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-lp8rr"] Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.386649 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.389533 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.399386 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-lp8rr"] Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.452095 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq9mw\" (UniqueName: \"kubernetes.io/projected/81c574b4-61cc-4790-93c5-a08e34058cbe-kube-api-access-bq9mw\") pod \"root-account-create-update-lp8rr\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.452522 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c574b4-61cc-4790-93c5-a08e34058cbe-operator-scripts\") pod \"root-account-create-update-lp8rr\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.554350 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c574b4-61cc-4790-93c5-a08e34058cbe-operator-scripts\") pod \"root-account-create-update-lp8rr\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.554419 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq9mw\" (UniqueName: \"kubernetes.io/projected/81c574b4-61cc-4790-93c5-a08e34058cbe-kube-api-access-bq9mw\") pod \"root-account-create-update-lp8rr\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.555730 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c574b4-61cc-4790-93c5-a08e34058cbe-operator-scripts\") pod \"root-account-create-update-lp8rr\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.581101 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq9mw\" (UniqueName: \"kubernetes.io/projected/81c574b4-61cc-4790-93c5-a08e34058cbe-kube-api-access-bq9mw\") pod \"root-account-create-update-lp8rr\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.648391 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.745726 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758398 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkkrg\" (UniqueName: \"kubernetes.io/projected/aceaa9f9-225f-4194-ae36-f82910ac4367-kube-api-access-lkkrg\") pod \"aceaa9f9-225f-4194-ae36-f82910ac4367\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758578 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-scripts\") pod \"aceaa9f9-225f-4194-ae36-f82910ac4367\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758645 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-additional-scripts\") pod \"aceaa9f9-225f-4194-ae36-f82910ac4367\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758697 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-log-ovn\") pod \"aceaa9f9-225f-4194-ae36-f82910ac4367\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758725 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run-ovn\") pod \"aceaa9f9-225f-4194-ae36-f82910ac4367\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758764 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run\") pod \"aceaa9f9-225f-4194-ae36-f82910ac4367\" (UID: \"aceaa9f9-225f-4194-ae36-f82910ac4367\") " Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758829 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "aceaa9f9-225f-4194-ae36-f82910ac4367" (UID: "aceaa9f9-225f-4194-ae36-f82910ac4367"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758873 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "aceaa9f9-225f-4194-ae36-f82910ac4367" (UID: "aceaa9f9-225f-4194-ae36-f82910ac4367"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.758985 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run" (OuterVolumeSpecName: "var-run") pod "aceaa9f9-225f-4194-ae36-f82910ac4367" (UID: "aceaa9f9-225f-4194-ae36-f82910ac4367"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.759229 4906 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-log-ovn\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.759248 4906 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.759257 4906 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/aceaa9f9-225f-4194-ae36-f82910ac4367-var-run\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.759477 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "aceaa9f9-225f-4194-ae36-f82910ac4367" (UID: "aceaa9f9-225f-4194-ae36-f82910ac4367"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.760047 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-scripts" (OuterVolumeSpecName: "scripts") pod "aceaa9f9-225f-4194-ae36-f82910ac4367" (UID: "aceaa9f9-225f-4194-ae36-f82910ac4367"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.764178 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aceaa9f9-225f-4194-ae36-f82910ac4367-kube-api-access-lkkrg" (OuterVolumeSpecName: "kube-api-access-lkkrg") pod "aceaa9f9-225f-4194-ae36-f82910ac4367" (UID: "aceaa9f9-225f-4194-ae36-f82910ac4367"). InnerVolumeSpecName "kube-api-access-lkkrg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.861594 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkkrg\" (UniqueName: \"kubernetes.io/projected/aceaa9f9-225f-4194-ae36-f82910ac4367-kube-api-access-lkkrg\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.862017 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:45 crc kubenswrapper[4906]: I0227 08:49:45.862028 4906 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/aceaa9f9-225f-4194-ae36-f82910ac4367-additional-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.229155 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-lp8rr"] Feb 27 08:49:46 crc kubenswrapper[4906]: W0227 08:49:46.244844 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81c574b4_61cc_4790_93c5_a08e34058cbe.slice/crio-0f5cc39e5eb27ff03fddfee227b08c0ea414da5cec7cdae68bd7a05dfec3db0c WatchSource:0}: Error finding container 0f5cc39e5eb27ff03fddfee227b08c0ea414da5cec7cdae68bd7a05dfec3db0c: Status 404 returned error can't find the container with id 0f5cc39e5eb27ff03fddfee227b08c0ea414da5cec7cdae68bd7a05dfec3db0c Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.314406 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-67lpp-config-5kmkv" event={"ID":"aceaa9f9-225f-4194-ae36-f82910ac4367","Type":"ContainerDied","Data":"9279352c1a56f9ff97bf270862e411b4b52d8a5f10a36d60ea128bc9fb186780"} Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.314461 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9279352c1a56f9ff97bf270862e411b4b52d8a5f10a36d60ea128bc9fb186780" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.315998 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-67lpp-config-5kmkv" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.316303 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-lp8rr" event={"ID":"81c574b4-61cc-4790-93c5-a08e34058cbe","Type":"ContainerStarted","Data":"0f5cc39e5eb27ff03fddfee227b08c0ea414da5cec7cdae68bd7a05dfec3db0c"} Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.393838 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-7nq6g"] Feb 27 08:49:46 crc kubenswrapper[4906]: E0227 08:49:46.394273 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aceaa9f9-225f-4194-ae36-f82910ac4367" containerName="ovn-config" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.394294 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="aceaa9f9-225f-4194-ae36-f82910ac4367" containerName="ovn-config" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.394468 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="aceaa9f9-225f-4194-ae36-f82910ac4367" containerName="ovn-config" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.397860 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.410947 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-7nq6g"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.476871 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c7fc6\" (UniqueName: \"kubernetes.io/projected/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-kube-api-access-c7fc6\") pod \"cinder-db-create-7nq6g\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.477094 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-operator-scripts\") pod \"cinder-db-create-7nq6g\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.519720 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0045-account-create-update-wqm68"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.521156 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.523999 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.548460 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0045-account-create-update-wqm68"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.565006 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19570524-db36-4ef4-893a-fc108043dd6d" path="/var/lib/kubelet/pods/19570524-db36-4ef4-893a-fc108043dd6d/volumes" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.580144 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-operator-scripts\") pod \"cinder-db-create-7nq6g\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.580233 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c7fc6\" (UniqueName: \"kubernetes.io/projected/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-kube-api-access-c7fc6\") pod \"cinder-db-create-7nq6g\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.581691 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-operator-scripts\") pod \"cinder-db-create-7nq6g\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.631495 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c7fc6\" (UniqueName: \"kubernetes.io/projected/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-kube-api-access-c7fc6\") pod \"cinder-db-create-7nq6g\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc 
kubenswrapper[4906]: I0227 08:49:46.676826 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-xhr9h"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.678216 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.680968 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.681824 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x96zn" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.681992 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.682132 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.683824 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d44ec-1d13-43a5-90b9-14282b765fdc-operator-scripts\") pod \"barbican-0045-account-create-update-wqm68\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.683967 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq6cv\" (UniqueName: \"kubernetes.io/projected/1c0d44ec-1d13-43a5-90b9-14282b765fdc-kube-api-access-jq6cv\") pod \"barbican-0045-account-create-update-wqm68\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.698066 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-c732-account-create-update-srkl5"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.699476 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.705061 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.707954 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-plwsw"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.709256 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.733910 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xhr9h"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.764926 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.779802 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-plwsw"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.785652 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c732-account-create-update-srkl5"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788245 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhhwq\" (UniqueName: \"kubernetes.io/projected/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-kube-api-access-hhhwq\") pod \"barbican-db-create-plwsw\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788306 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-config-data\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788324 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-combined-ca-bundle\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788351 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d44ec-1d13-43a5-90b9-14282b765fdc-operator-scripts\") pod \"barbican-0045-account-create-update-wqm68\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788393 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq6cv\" (UniqueName: \"kubernetes.io/projected/1c0d44ec-1d13-43a5-90b9-14282b765fdc-kube-api-access-jq6cv\") pod \"barbican-0045-account-create-update-wqm68\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788456 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghfsk\" (UniqueName: \"kubernetes.io/projected/2bebd962-3db8-45c9-9de7-a96a207a8b12-kube-api-access-ghfsk\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788474 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc216e22-3106-40fb-9f13-fddc321b8394-operator-scripts\") pod \"cinder-c732-account-create-update-srkl5\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788492 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-operator-scripts\") pod \"barbican-db-create-plwsw\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.788521 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sm9sj\" (UniqueName: \"kubernetes.io/projected/cc216e22-3106-40fb-9f13-fddc321b8394-kube-api-access-sm9sj\") pod \"cinder-c732-account-create-update-srkl5\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.791498 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d44ec-1d13-43a5-90b9-14282b765fdc-operator-scripts\") pod \"barbican-0045-account-create-update-wqm68\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.819632 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq6cv\" (UniqueName: \"kubernetes.io/projected/1c0d44ec-1d13-43a5-90b9-14282b765fdc-kube-api-access-jq6cv\") pod \"barbican-0045-account-create-update-wqm68\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.838522 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-7zdvv"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.840043 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.846140 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.848599 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-67lpp-config-5kmkv"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.868268 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-7zdvv"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.895538 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghfsk\" (UniqueName: \"kubernetes.io/projected/2bebd962-3db8-45c9-9de7-a96a207a8b12-kube-api-access-ghfsk\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.895611 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc216e22-3106-40fb-9f13-fddc321b8394-operator-scripts\") pod \"cinder-c732-account-create-update-srkl5\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.895667 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-operator-scripts\") pod \"barbican-db-create-plwsw\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.895743 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sm9sj\" (UniqueName: \"kubernetes.io/projected/cc216e22-3106-40fb-9f13-fddc321b8394-kube-api-access-sm9sj\") pod \"cinder-c732-account-create-update-srkl5\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.895972 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhhwq\" (UniqueName: \"kubernetes.io/projected/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-kube-api-access-hhhwq\") pod \"barbican-db-create-plwsw\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.896093 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-config-data\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.896137 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-combined-ca-bundle\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.898231 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc216e22-3106-40fb-9f13-fddc321b8394-operator-scripts\") pod \"cinder-c732-account-create-update-srkl5\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " 
pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.899011 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-operator-scripts\") pod \"barbican-db-create-plwsw\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.917143 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-combined-ca-bundle\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.919362 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghfsk\" (UniqueName: \"kubernetes.io/projected/2bebd962-3db8-45c9-9de7-a96a207a8b12-kube-api-access-ghfsk\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.927417 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sm9sj\" (UniqueName: \"kubernetes.io/projected/cc216e22-3106-40fb-9f13-fddc321b8394-kube-api-access-sm9sj\") pod \"cinder-c732-account-create-update-srkl5\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.928430 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhhwq\" (UniqueName: \"kubernetes.io/projected/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-kube-api-access-hhhwq\") pod \"barbican-db-create-plwsw\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.937588 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-config-data\") pod \"keystone-db-sync-xhr9h\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.984681 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-67lpp-config-5kmkv"] Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.998557 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6b6c\" (UniqueName: \"kubernetes.io/projected/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-kube-api-access-x6b6c\") pod \"neutron-db-create-7zdvv\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:46 crc kubenswrapper[4906]: I0227 08:49:46.998738 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-operator-scripts\") pod \"neutron-db-create-7zdvv\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.012854 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.036583 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.058490 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.102120 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6b6c\" (UniqueName: \"kubernetes.io/projected/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-kube-api-access-x6b6c\") pod \"neutron-db-create-7zdvv\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.102322 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-operator-scripts\") pod \"neutron-db-create-7zdvv\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.104531 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-operator-scripts\") pod \"neutron-db-create-7zdvv\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.105345 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-1668-account-create-update-q4g7s"] Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.106754 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.109800 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.130568 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6b6c\" (UniqueName: \"kubernetes.io/projected/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-kube-api-access-x6b6c\") pod \"neutron-db-create-7zdvv\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.131710 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1668-account-create-update-q4g7s"] Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.204030 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvj5p\" (UniqueName: \"kubernetes.io/projected/0e0d50eb-c3dc-456b-9966-493e163facdf-kube-api-access-jvj5p\") pod \"neutron-1668-account-create-update-q4g7s\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.204074 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e0d50eb-c3dc-456b-9966-493e163facdf-operator-scripts\") pod \"neutron-1668-account-create-update-q4g7s\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.279681 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.306128 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvj5p\" (UniqueName: \"kubernetes.io/projected/0e0d50eb-c3dc-456b-9966-493e163facdf-kube-api-access-jvj5p\") pod \"neutron-1668-account-create-update-q4g7s\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.306199 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e0d50eb-c3dc-456b-9966-493e163facdf-operator-scripts\") pod \"neutron-1668-account-create-update-q4g7s\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.307868 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e0d50eb-c3dc-456b-9966-493e163facdf-operator-scripts\") pod \"neutron-1668-account-create-update-q4g7s\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.329811 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvj5p\" (UniqueName: \"kubernetes.io/projected/0e0d50eb-c3dc-456b-9966-493e163facdf-kube-api-access-jvj5p\") pod \"neutron-1668-account-create-update-q4g7s\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.339233 4906 generic.go:334] "Generic (PLEG): container finished" podID="81c574b4-61cc-4790-93c5-a08e34058cbe" containerID="789a063d212389d5c48795acd28ea9d6e627e9514c7541c5ef82a3f130bc87e6" exitCode=0 Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.339303 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-lp8rr" event={"ID":"81c574b4-61cc-4790-93c5-a08e34058cbe","Type":"ContainerDied","Data":"789a063d212389d5c48795acd28ea9d6e627e9514c7541c5ef82a3f130bc87e6"} Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.462903 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.475638 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-7nq6g"] Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.559648 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0045-account-create-update-wqm68"] Feb 27 08:49:47 crc kubenswrapper[4906]: W0227 08:49:47.797664 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c8ae872_f33a_4027_b4f4_ed0baf60a19f.slice/crio-1d033b4335a5ae3ce519e1409b5dc847ce17838e13ad4ffd7663e3948f7fcac2 WatchSource:0}: Error finding container 1d033b4335a5ae3ce519e1409b5dc847ce17838e13ad4ffd7663e3948f7fcac2: Status 404 returned error can't find the container with id 1d033b4335a5ae3ce519e1409b5dc847ce17838e13ad4ffd7663e3948f7fcac2 Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.804678 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-plwsw"] Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.815615 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-xhr9h"] Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.833734 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-c732-account-create-update-srkl5"] Feb 27 08:49:47 crc kubenswrapper[4906]: I0227 08:49:47.841560 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-7zdvv"] Feb 27 08:49:47 crc kubenswrapper[4906]: W0227 08:49:47.845627 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2bebd962_3db8_45c9_9de7_a96a207a8b12.slice/crio-2cf49dab941c998267843cde78db9df8186fa0a9dd3e90776789de09bd6b7bfd WatchSource:0}: Error finding container 2cf49dab941c998267843cde78db9df8186fa0a9dd3e90776789de09bd6b7bfd: Status 404 returned error can't find the container with id 2cf49dab941c998267843cde78db9df8186fa0a9dd3e90776789de09bd6b7bfd Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.151005 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-1668-account-create-update-q4g7s"] Feb 27 08:49:48 crc kubenswrapper[4906]: W0227 08:49:48.169831 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0e0d50eb_c3dc_456b_9966_493e163facdf.slice/crio-f4e42bee480867d01c99e548523bf037030c4a27b6179649c012544a9e42eacf WatchSource:0}: Error finding container f4e42bee480867d01c99e548523bf037030c4a27b6179649c012544a9e42eacf: Status 404 returned error can't find the container with id f4e42bee480867d01c99e548523bf037030c4a27b6179649c012544a9e42eacf Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.353391 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-plwsw" event={"ID":"5c8ae872-f33a-4027-b4f4-ed0baf60a19f","Type":"ContainerStarted","Data":"79cf6d7092e960d31ba6e89407c1cdc3e677efa8fb3204903d642d98d4e61764"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.353454 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-plwsw" event={"ID":"5c8ae872-f33a-4027-b4f4-ed0baf60a19f","Type":"ContainerStarted","Data":"1d033b4335a5ae3ce519e1409b5dc847ce17838e13ad4ffd7663e3948f7fcac2"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.356039 
4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7zdvv" event={"ID":"c8fb662f-4673-4364-ba6b-9ff02fa23b4e","Type":"ContainerStarted","Data":"036ffbec054a33b6847ce56b5326ba4cecddefe32239438ce69e33d8ce6e3e99"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.356071 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7zdvv" event={"ID":"c8fb662f-4673-4364-ba6b-9ff02fa23b4e","Type":"ContainerStarted","Data":"72194ff94f1069d7e51bd0a2ace9fc95c5f44af174eadeb1ffcb1bc61eb0735b"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.358094 4906 generic.go:334] "Generic (PLEG): container finished" podID="1c0d44ec-1d13-43a5-90b9-14282b765fdc" containerID="a68840212c3d548fdbe7cf32a761361a560bcb1df3ef9e2967c2580f4347d649" exitCode=0 Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.358251 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0045-account-create-update-wqm68" event={"ID":"1c0d44ec-1d13-43a5-90b9-14282b765fdc","Type":"ContainerDied","Data":"a68840212c3d548fdbe7cf32a761361a560bcb1df3ef9e2967c2580f4347d649"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.358296 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0045-account-create-update-wqm68" event={"ID":"1c0d44ec-1d13-43a5-90b9-14282b765fdc","Type":"ContainerStarted","Data":"ab548dd0d76a5e7ce3c80c595839197ed85bddf4d22e91dfe427a4c3f95c58ae"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.360498 4906 generic.go:334] "Generic (PLEG): container finished" podID="17434f9b-c5b6-4828-a71e-d8ebefaf9aba" containerID="d8339b31fbe813b2b890b166fe48a7cecf5924a9cd5ee98c6afb5d41c3bfce3f" exitCode=0 Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.361038 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-7nq6g" event={"ID":"17434f9b-c5b6-4828-a71e-d8ebefaf9aba","Type":"ContainerDied","Data":"d8339b31fbe813b2b890b166fe48a7cecf5924a9cd5ee98c6afb5d41c3bfce3f"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.361067 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-7nq6g" event={"ID":"17434f9b-c5b6-4828-a71e-d8ebefaf9aba","Type":"ContainerStarted","Data":"03750187dbb806b5adaced624237772ee2afbd69b4a0b6443043c04f9163bd8b"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.367414 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1668-account-create-update-q4g7s" event={"ID":"0e0d50eb-c3dc-456b-9966-493e163facdf","Type":"ContainerStarted","Data":"603bfcd590be81ba2ee40841d59a8b26694906f7e62cd5d651bf832bf01557ac"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.367468 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1668-account-create-update-q4g7s" event={"ID":"0e0d50eb-c3dc-456b-9966-493e163facdf","Type":"ContainerStarted","Data":"f4e42bee480867d01c99e548523bf037030c4a27b6179649c012544a9e42eacf"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.369559 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-plwsw" podStartSLOduration=2.369536289 podStartE2EDuration="2.369536289s" podCreationTimestamp="2026-02-27 08:49:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:48.36807127 +0000 UTC m=+1286.762472880" watchObservedRunningTime="2026-02-27 08:49:48.369536289 +0000 UTC 
m=+1286.763937899" Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.378999 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xhr9h" event={"ID":"2bebd962-3db8-45c9-9de7-a96a207a8b12","Type":"ContainerStarted","Data":"2cf49dab941c998267843cde78db9df8186fa0a9dd3e90776789de09bd6b7bfd"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.391120 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c732-account-create-update-srkl5" event={"ID":"cc216e22-3106-40fb-9f13-fddc321b8394","Type":"ContainerStarted","Data":"e7ef7b403767c0dd4412bfaa469ab7da53a63e63af0a30adebf41d367d3e3822"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.391195 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c732-account-create-update-srkl5" event={"ID":"cc216e22-3106-40fb-9f13-fddc321b8394","Type":"ContainerStarted","Data":"08b95c02e83b80e0dc204eeb2a07a664c44b8522abbb845eab152b4d6fe78885"} Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.394326 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-1668-account-create-update-q4g7s" podStartSLOduration=1.394304319 podStartE2EDuration="1.394304319s" podCreationTimestamp="2026-02-27 08:49:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:48.38978009 +0000 UTC m=+1286.784181690" watchObservedRunningTime="2026-02-27 08:49:48.394304319 +0000 UTC m=+1286.788705929" Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.488367 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-c732-account-create-update-srkl5" podStartSLOduration=2.488336368 podStartE2EDuration="2.488336368s" podCreationTimestamp="2026-02-27 08:49:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:49:48.472766269 +0000 UTC m=+1286.867167879" watchObservedRunningTime="2026-02-27 08:49:48.488336368 +0000 UTC m=+1286.882737978" Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.568093 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aceaa9f9-225f-4194-ae36-f82910ac4367" path="/var/lib/kubelet/pods/aceaa9f9-225f-4194-ae36-f82910ac4367/volumes" Feb 27 08:49:48 crc kubenswrapper[4906]: I0227 08:49:48.905260 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.085075 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c574b4-61cc-4790-93c5-a08e34058cbe-operator-scripts\") pod \"81c574b4-61cc-4790-93c5-a08e34058cbe\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.085723 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq9mw\" (UniqueName: \"kubernetes.io/projected/81c574b4-61cc-4790-93c5-a08e34058cbe-kube-api-access-bq9mw\") pod \"81c574b4-61cc-4790-93c5-a08e34058cbe\" (UID: \"81c574b4-61cc-4790-93c5-a08e34058cbe\") " Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.086027 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81c574b4-61cc-4790-93c5-a08e34058cbe-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "81c574b4-61cc-4790-93c5-a08e34058cbe" (UID: "81c574b4-61cc-4790-93c5-a08e34058cbe"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.086694 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81c574b4-61cc-4790-93c5-a08e34058cbe-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.097132 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81c574b4-61cc-4790-93c5-a08e34058cbe-kube-api-access-bq9mw" (OuterVolumeSpecName: "kube-api-access-bq9mw") pod "81c574b4-61cc-4790-93c5-a08e34058cbe" (UID: "81c574b4-61cc-4790-93c5-a08e34058cbe"). InnerVolumeSpecName "kube-api-access-bq9mw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.188857 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq9mw\" (UniqueName: \"kubernetes.io/projected/81c574b4-61cc-4790-93c5-a08e34058cbe-kube-api-access-bq9mw\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.404325 4906 generic.go:334] "Generic (PLEG): container finished" podID="cc216e22-3106-40fb-9f13-fddc321b8394" containerID="e7ef7b403767c0dd4412bfaa469ab7da53a63e63af0a30adebf41d367d3e3822" exitCode=0 Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.404466 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c732-account-create-update-srkl5" event={"ID":"cc216e22-3106-40fb-9f13-fddc321b8394","Type":"ContainerDied","Data":"e7ef7b403767c0dd4412bfaa469ab7da53a63e63af0a30adebf41d367d3e3822"} Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.413708 4906 generic.go:334] "Generic (PLEG): container finished" podID="5c8ae872-f33a-4027-b4f4-ed0baf60a19f" containerID="79cf6d7092e960d31ba6e89407c1cdc3e677efa8fb3204903d642d98d4e61764" exitCode=0 Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.413813 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-plwsw" event={"ID":"5c8ae872-f33a-4027-b4f4-ed0baf60a19f","Type":"ContainerDied","Data":"79cf6d7092e960d31ba6e89407c1cdc3e677efa8fb3204903d642d98d4e61764"} Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.416517 4906 generic.go:334] "Generic (PLEG): container finished" podID="c8fb662f-4673-4364-ba6b-9ff02fa23b4e" containerID="036ffbec054a33b6847ce56b5326ba4cecddefe32239438ce69e33d8ce6e3e99" exitCode=0 Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.416590 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7zdvv" event={"ID":"c8fb662f-4673-4364-ba6b-9ff02fa23b4e","Type":"ContainerDied","Data":"036ffbec054a33b6847ce56b5326ba4cecddefe32239438ce69e33d8ce6e3e99"} Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.431283 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-lp8rr" Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.431307 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-lp8rr" event={"ID":"81c574b4-61cc-4790-93c5-a08e34058cbe","Type":"ContainerDied","Data":"0f5cc39e5eb27ff03fddfee227b08c0ea414da5cec7cdae68bd7a05dfec3db0c"} Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.431460 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f5cc39e5eb27ff03fddfee227b08c0ea414da5cec7cdae68bd7a05dfec3db0c" Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.441574 4906 generic.go:334] "Generic (PLEG): container finished" podID="0e0d50eb-c3dc-456b-9966-493e163facdf" containerID="603bfcd590be81ba2ee40841d59a8b26694906f7e62cd5d651bf832bf01557ac" exitCode=0 Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.441678 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1668-account-create-update-q4g7s" event={"ID":"0e0d50eb-c3dc-456b-9966-493e163facdf","Type":"ContainerDied","Data":"603bfcd590be81ba2ee40841d59a8b26694906f7e62cd5d651bf832bf01557ac"} Feb 27 08:49:49 crc kubenswrapper[4906]: I0227 08:49:49.900137 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.008689 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-operator-scripts\") pod \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.008834 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x6b6c\" (UniqueName: \"kubernetes.io/projected/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-kube-api-access-x6b6c\") pod \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\" (UID: \"c8fb662f-4673-4364-ba6b-9ff02fa23b4e\") " Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.011608 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c8fb662f-4673-4364-ba6b-9ff02fa23b4e" (UID: "c8fb662f-4673-4364-ba6b-9ff02fa23b4e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.014345 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-kube-api-access-x6b6c" (OuterVolumeSpecName: "kube-api-access-x6b6c") pod "c8fb662f-4673-4364-ba6b-9ff02fa23b4e" (UID: "c8fb662f-4673-4364-ba6b-9ff02fa23b4e"). InnerVolumeSpecName "kube-api-access-x6b6c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.032798 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.036081 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.111247 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.111295 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x6b6c\" (UniqueName: \"kubernetes.io/projected/c8fb662f-4673-4364-ba6b-9ff02fa23b4e-kube-api-access-x6b6c\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.216974 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d44ec-1d13-43a5-90b9-14282b765fdc-operator-scripts\") pod \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.217076 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c7fc6\" (UniqueName: \"kubernetes.io/projected/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-kube-api-access-c7fc6\") pod \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.217298 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-operator-scripts\") pod \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\" (UID: \"17434f9b-c5b6-4828-a71e-d8ebefaf9aba\") " Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.217326 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jq6cv\" (UniqueName: \"kubernetes.io/projected/1c0d44ec-1d13-43a5-90b9-14282b765fdc-kube-api-access-jq6cv\") pod \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\" (UID: \"1c0d44ec-1d13-43a5-90b9-14282b765fdc\") " Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.217648 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c0d44ec-1d13-43a5-90b9-14282b765fdc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1c0d44ec-1d13-43a5-90b9-14282b765fdc" (UID: "1c0d44ec-1d13-43a5-90b9-14282b765fdc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.217960 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "17434f9b-c5b6-4828-a71e-d8ebefaf9aba" (UID: "17434f9b-c5b6-4828-a71e-d8ebefaf9aba"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.218376 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c0d44ec-1d13-43a5-90b9-14282b765fdc-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.218487 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.221729 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-kube-api-access-c7fc6" (OuterVolumeSpecName: "kube-api-access-c7fc6") pod "17434f9b-c5b6-4828-a71e-d8ebefaf9aba" (UID: "17434f9b-c5b6-4828-a71e-d8ebefaf9aba"). InnerVolumeSpecName "kube-api-access-c7fc6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.222275 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c0d44ec-1d13-43a5-90b9-14282b765fdc-kube-api-access-jq6cv" (OuterVolumeSpecName: "kube-api-access-jq6cv") pod "1c0d44ec-1d13-43a5-90b9-14282b765fdc" (UID: "1c0d44ec-1d13-43a5-90b9-14282b765fdc"). InnerVolumeSpecName "kube-api-access-jq6cv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.324204 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c7fc6\" (UniqueName: \"kubernetes.io/projected/17434f9b-c5b6-4828-a71e-d8ebefaf9aba-kube-api-access-c7fc6\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.324273 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jq6cv\" (UniqueName: \"kubernetes.io/projected/1c0d44ec-1d13-43a5-90b9-14282b765fdc-kube-api-access-jq6cv\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.455495 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-7nq6g" event={"ID":"17434f9b-c5b6-4828-a71e-d8ebefaf9aba","Type":"ContainerDied","Data":"03750187dbb806b5adaced624237772ee2afbd69b4a0b6443043c04f9163bd8b"} Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.455569 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="03750187dbb806b5adaced624237772ee2afbd69b4a0b6443043c04f9163bd8b" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.455520 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-7nq6g" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.462022 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-7zdvv" event={"ID":"c8fb662f-4673-4364-ba6b-9ff02fa23b4e","Type":"ContainerDied","Data":"72194ff94f1069d7e51bd0a2ace9fc95c5f44af174eadeb1ffcb1bc61eb0735b"} Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.462047 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-7zdvv" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.462066 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="72194ff94f1069d7e51bd0a2ace9fc95c5f44af174eadeb1ffcb1bc61eb0735b" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.464665 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0045-account-create-update-wqm68" Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.467497 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0045-account-create-update-wqm68" event={"ID":"1c0d44ec-1d13-43a5-90b9-14282b765fdc","Type":"ContainerDied","Data":"ab548dd0d76a5e7ce3c80c595839197ed85bddf4d22e91dfe427a4c3f95c58ae"} Feb 27 08:49:50 crc kubenswrapper[4906]: I0227 08:49:50.467541 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab548dd0d76a5e7ce3c80c595839197ed85bddf4d22e91dfe427a4c3f95c58ae" Feb 27 08:49:50 crc kubenswrapper[4906]: E0227 08:49:50.516311 4906 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c0d44ec_1d13_43a5_90b9_14282b765fdc.slice/crio-ab548dd0d76a5e7ce3c80c595839197ed85bddf4d22e91dfe427a4c3f95c58ae\": RecentStats: unable to find data in memory cache]" Feb 27 08:49:54 crc kubenswrapper[4906]: I0227 08:49:54.845103 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:49:54 crc kubenswrapper[4906]: I0227 08:49:54.846080 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.464729 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.543448 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-plwsw" event={"ID":"5c8ae872-f33a-4027-b4f4-ed0baf60a19f","Type":"ContainerDied","Data":"1d033b4335a5ae3ce519e1409b5dc847ce17838e13ad4ffd7663e3948f7fcac2"} Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.543876 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d033b4335a5ae3ce519e1409b5dc847ce17838e13ad4ffd7663e3948f7fcac2" Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.543559 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-plwsw" Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.570204 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-operator-scripts\") pod \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.570394 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhhwq\" (UniqueName: \"kubernetes.io/projected/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-kube-api-access-hhhwq\") pod \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\" (UID: \"5c8ae872-f33a-4027-b4f4-ed0baf60a19f\") " Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.571659 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5c8ae872-f33a-4027-b4f4-ed0baf60a19f" (UID: "5c8ae872-f33a-4027-b4f4-ed0baf60a19f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.584413 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-kube-api-access-hhhwq" (OuterVolumeSpecName: "kube-api-access-hhhwq") pod "5c8ae872-f33a-4027-b4f4-ed0baf60a19f" (UID: "5c8ae872-f33a-4027-b4f4-ed0baf60a19f"). InnerVolumeSpecName "kube-api-access-hhhwq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.673820 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:49:57 crc kubenswrapper[4906]: I0227 08:49:57.673935 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhhwq\" (UniqueName: \"kubernetes.io/projected/5c8ae872-f33a-4027-b4f4-ed0baf60a19f-kube-api-access-hhhwq\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.166116 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536370-qtrvs"] Feb 27 08:50:00 crc kubenswrapper[4906]: E0227 08:50:00.167198 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8fb662f-4673-4364-ba6b-9ff02fa23b4e" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.167214 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8fb662f-4673-4364-ba6b-9ff02fa23b4e" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: E0227 08:50:00.167232 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17434f9b-c5b6-4828-a71e-d8ebefaf9aba" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.167239 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="17434f9b-c5b6-4828-a71e-d8ebefaf9aba" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: E0227 08:50:00.167253 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81c574b4-61cc-4790-93c5-a08e34058cbe" containerName="mariadb-account-create-update" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.167260 4906 
state_mem.go:107] "Deleted CPUSet assignment" podUID="81c574b4-61cc-4790-93c5-a08e34058cbe" containerName="mariadb-account-create-update" Feb 27 08:50:00 crc kubenswrapper[4906]: E0227 08:50:00.167273 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c0d44ec-1d13-43a5-90b9-14282b765fdc" containerName="mariadb-account-create-update" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.167280 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c0d44ec-1d13-43a5-90b9-14282b765fdc" containerName="mariadb-account-create-update" Feb 27 08:50:00 crc kubenswrapper[4906]: E0227 08:50:00.167308 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c8ae872-f33a-4027-b4f4-ed0baf60a19f" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.167314 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c8ae872-f33a-4027-b4f4-ed0baf60a19f" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.169133 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c0d44ec-1d13-43a5-90b9-14282b765fdc" containerName="mariadb-account-create-update" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.169207 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8fb662f-4673-4364-ba6b-9ff02fa23b4e" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.169262 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="81c574b4-61cc-4790-93c5-a08e34058cbe" containerName="mariadb-account-create-update" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.169289 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="17434f9b-c5b6-4828-a71e-d8ebefaf9aba" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.169310 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c8ae872-f33a-4027-b4f4-ed0baf60a19f" containerName="mariadb-database-create" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.176561 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536370-qtrvs" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.180777 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.181478 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.182104 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.209404 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536370-qtrvs"] Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.333011 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mwbv\" (UniqueName: \"kubernetes.io/projected/e984b25a-d687-4a9b-a92a-b5e55d37ce8f-kube-api-access-9mwbv\") pod \"auto-csr-approver-29536370-qtrvs\" (UID: \"e984b25a-d687-4a9b-a92a-b5e55d37ce8f\") " pod="openshift-infra/auto-csr-approver-29536370-qtrvs" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.435709 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mwbv\" (UniqueName: \"kubernetes.io/projected/e984b25a-d687-4a9b-a92a-b5e55d37ce8f-kube-api-access-9mwbv\") pod \"auto-csr-approver-29536370-qtrvs\" (UID: \"e984b25a-d687-4a9b-a92a-b5e55d37ce8f\") " pod="openshift-infra/auto-csr-approver-29536370-qtrvs" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.463446 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mwbv\" (UniqueName: \"kubernetes.io/projected/e984b25a-d687-4a9b-a92a-b5e55d37ce8f-kube-api-access-9mwbv\") pod \"auto-csr-approver-29536370-qtrvs\" (UID: \"e984b25a-d687-4a9b-a92a-b5e55d37ce8f\") " pod="openshift-infra/auto-csr-approver-29536370-qtrvs" Feb 27 08:50:00 crc kubenswrapper[4906]: I0227 08:50:00.507287 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536370-qtrvs" Feb 27 08:50:06 crc kubenswrapper[4906]: E0227 08:50:06.645099 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Feb 27 08:50:06 crc kubenswrapper[4906]: E0227 08:50:06.647581 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kk88q,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-sgch9_openstack(e2e1d22f-d9ac-467f-81f9-e4a4282f46aa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:06 crc kubenswrapper[4906]: E0227 08:50:06.648859 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-sgch9" podUID="e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" Feb 27 08:50:06 crc kubenswrapper[4906]: I0227 08:50:06.924410 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:50:06 crc kubenswrapper[4906]: I0227 08:50:06.972139 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.086031 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvj5p\" (UniqueName: \"kubernetes.io/projected/0e0d50eb-c3dc-456b-9966-493e163facdf-kube-api-access-jvj5p\") pod \"0e0d50eb-c3dc-456b-9966-493e163facdf\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.086107 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sm9sj\" (UniqueName: \"kubernetes.io/projected/cc216e22-3106-40fb-9f13-fddc321b8394-kube-api-access-sm9sj\") pod \"cc216e22-3106-40fb-9f13-fddc321b8394\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.086142 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc216e22-3106-40fb-9f13-fddc321b8394-operator-scripts\") pod \"cc216e22-3106-40fb-9f13-fddc321b8394\" (UID: \"cc216e22-3106-40fb-9f13-fddc321b8394\") " Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.086588 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e0d50eb-c3dc-456b-9966-493e163facdf-operator-scripts\") pod \"0e0d50eb-c3dc-456b-9966-493e163facdf\" (UID: \"0e0d50eb-c3dc-456b-9966-493e163facdf\") " Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.087303 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc216e22-3106-40fb-9f13-fddc321b8394-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cc216e22-3106-40fb-9f13-fddc321b8394" (UID: "cc216e22-3106-40fb-9f13-fddc321b8394"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.087317 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e0d50eb-c3dc-456b-9966-493e163facdf-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0e0d50eb-c3dc-456b-9966-493e163facdf" (UID: "0e0d50eb-c3dc-456b-9966-493e163facdf"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.093617 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc216e22-3106-40fb-9f13-fddc321b8394-kube-api-access-sm9sj" (OuterVolumeSpecName: "kube-api-access-sm9sj") pod "cc216e22-3106-40fb-9f13-fddc321b8394" (UID: "cc216e22-3106-40fb-9f13-fddc321b8394"). InnerVolumeSpecName "kube-api-access-sm9sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.099569 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e0d50eb-c3dc-456b-9966-493e163facdf-kube-api-access-jvj5p" (OuterVolumeSpecName: "kube-api-access-jvj5p") pod "0e0d50eb-c3dc-456b-9966-493e163facdf" (UID: "0e0d50eb-c3dc-456b-9966-493e163facdf"). InnerVolumeSpecName "kube-api-access-jvj5p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.191026 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0e0d50eb-c3dc-456b-9966-493e163facdf-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.191094 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvj5p\" (UniqueName: \"kubernetes.io/projected/0e0d50eb-c3dc-456b-9966-493e163facdf-kube-api-access-jvj5p\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.191111 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sm9sj\" (UniqueName: \"kubernetes.io/projected/cc216e22-3106-40fb-9f13-fddc321b8394-kube-api-access-sm9sj\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.191125 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc216e22-3106-40fb-9f13-fddc321b8394-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.253800 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536370-qtrvs"] Feb 27 08:50:07 crc kubenswrapper[4906]: W0227 08:50:07.265640 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode984b25a_d687_4a9b_a92a_b5e55d37ce8f.slice/crio-0574e0da792d2815fef782b268256c77cc20a994723818b01d84139be2f68341 WatchSource:0}: Error finding container 0574e0da792d2815fef782b268256c77cc20a994723818b01d84139be2f68341: Status 404 returned error can't find the container with id 0574e0da792d2815fef782b268256c77cc20a994723818b01d84139be2f68341 Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.635449 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536370-qtrvs" event={"ID":"e984b25a-d687-4a9b-a92a-b5e55d37ce8f","Type":"ContainerStarted","Data":"0574e0da792d2815fef782b268256c77cc20a994723818b01d84139be2f68341"} Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.638276 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-c732-account-create-update-srkl5" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.638950 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-c732-account-create-update-srkl5" event={"ID":"cc216e22-3106-40fb-9f13-fddc321b8394","Type":"ContainerDied","Data":"08b95c02e83b80e0dc204eeb2a07a664c44b8522abbb845eab152b4d6fe78885"} Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.639018 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="08b95c02e83b80e0dc204eeb2a07a664c44b8522abbb845eab152b4d6fe78885" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.641869 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-6jk9r" event={"ID":"db586541-2471-4a37-a7b6-3c8f324a696b","Type":"ContainerStarted","Data":"436ee7e9f47d89512517e01527c611a34144233f54e765ce6e2320b293ad4074"} Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.647198 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-1668-account-create-update-q4g7s" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.647196 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-1668-account-create-update-q4g7s" event={"ID":"0e0d50eb-c3dc-456b-9966-493e163facdf","Type":"ContainerDied","Data":"f4e42bee480867d01c99e548523bf037030c4a27b6179649c012544a9e42eacf"} Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.647358 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4e42bee480867d01c99e548523bf037030c4a27b6179649c012544a9e42eacf" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.650804 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xhr9h" event={"ID":"2bebd962-3db8-45c9-9de7-a96a207a8b12","Type":"ContainerStarted","Data":"8ca46ee208ce75fb2d5814027ef4cef40242d734440d427136e09997625c9234"} Feb 27 08:50:07 crc kubenswrapper[4906]: E0227 08:50:07.653209 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-sgch9" podUID="e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.664064 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-6jk9r" podStartSLOduration=13.531252766 podStartE2EDuration="56.664042432s" podCreationTimestamp="2026-02-27 08:49:11 +0000 UTC" firstStartedPulling="2026-02-27 08:49:23.630396165 +0000 UTC m=+1262.024797775" lastFinishedPulling="2026-02-27 08:50:06.763185831 +0000 UTC m=+1305.157587441" observedRunningTime="2026-02-27 08:50:07.663410465 +0000 UTC m=+1306.057812085" watchObservedRunningTime="2026-02-27 08:50:07.664042432 +0000 UTC m=+1306.058444042" Feb 27 08:50:07 crc kubenswrapper[4906]: I0227 08:50:07.719309 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-xhr9h" podStartSLOduration=2.813981856 podStartE2EDuration="21.719284401s" podCreationTimestamp="2026-02-27 08:49:46 +0000 UTC" firstStartedPulling="2026-02-27 08:49:47.858303377 +0000 UTC m=+1286.252704987" lastFinishedPulling="2026-02-27 08:50:06.763605922 +0000 UTC m=+1305.158007532" observedRunningTime="2026-02-27 08:50:07.716010615 +0000 UTC m=+1306.110412225" watchObservedRunningTime="2026-02-27 08:50:07.719284401 +0000 UTC m=+1306.113686001" Feb 27 08:50:09 crc kubenswrapper[4906]: I0227 08:50:09.670327 4906 generic.go:334] "Generic (PLEG): container finished" podID="e984b25a-d687-4a9b-a92a-b5e55d37ce8f" containerID="0965d89b29776672733411111b048996321fea370c02c0d8fcbea29bebdbd88b" exitCode=0 Feb 27 08:50:09 crc kubenswrapper[4906]: I0227 08:50:09.670583 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536370-qtrvs" event={"ID":"e984b25a-d687-4a9b-a92a-b5e55d37ce8f","Type":"ContainerDied","Data":"0965d89b29776672733411111b048996321fea370c02c0d8fcbea29bebdbd88b"} Feb 27 08:50:10 crc kubenswrapper[4906]: I0227 08:50:10.680507 4906 generic.go:334] "Generic (PLEG): container finished" podID="2bebd962-3db8-45c9-9de7-a96a207a8b12" containerID="8ca46ee208ce75fb2d5814027ef4cef40242d734440d427136e09997625c9234" exitCode=0 Feb 27 08:50:10 crc kubenswrapper[4906]: I0227 08:50:10.680768 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/keystone-db-sync-xhr9h" event={"ID":"2bebd962-3db8-45c9-9de7-a96a207a8b12","Type":"ContainerDied","Data":"8ca46ee208ce75fb2d5814027ef4cef40242d734440d427136e09997625c9234"} Feb 27 08:50:11 crc kubenswrapper[4906]: I0227 08:50:11.002720 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536370-qtrvs" Feb 27 08:50:11 crc kubenswrapper[4906]: I0227 08:50:11.076504 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mwbv\" (UniqueName: \"kubernetes.io/projected/e984b25a-d687-4a9b-a92a-b5e55d37ce8f-kube-api-access-9mwbv\") pod \"e984b25a-d687-4a9b-a92a-b5e55d37ce8f\" (UID: \"e984b25a-d687-4a9b-a92a-b5e55d37ce8f\") " Feb 27 08:50:11 crc kubenswrapper[4906]: I0227 08:50:11.095326 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e984b25a-d687-4a9b-a92a-b5e55d37ce8f-kube-api-access-9mwbv" (OuterVolumeSpecName: "kube-api-access-9mwbv") pod "e984b25a-d687-4a9b-a92a-b5e55d37ce8f" (UID: "e984b25a-d687-4a9b-a92a-b5e55d37ce8f"). InnerVolumeSpecName "kube-api-access-9mwbv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:11 crc kubenswrapper[4906]: I0227 08:50:11.179048 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mwbv\" (UniqueName: \"kubernetes.io/projected/e984b25a-d687-4a9b-a92a-b5e55d37ce8f-kube-api-access-9mwbv\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:11 crc kubenswrapper[4906]: I0227 08:50:11.695315 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536370-qtrvs" event={"ID":"e984b25a-d687-4a9b-a92a-b5e55d37ce8f","Type":"ContainerDied","Data":"0574e0da792d2815fef782b268256c77cc20a994723818b01d84139be2f68341"} Feb 27 08:50:11 crc kubenswrapper[4906]: I0227 08:50:11.695373 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0574e0da792d2815fef782b268256c77cc20a994723818b01d84139be2f68341" Feb 27 08:50:11 crc kubenswrapper[4906]: I0227 08:50:11.695408 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536370-qtrvs" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.072499 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.087362 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536364-76q6w"] Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.098500 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ghfsk\" (UniqueName: \"kubernetes.io/projected/2bebd962-3db8-45c9-9de7-a96a207a8b12-kube-api-access-ghfsk\") pod \"2bebd962-3db8-45c9-9de7-a96a207a8b12\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.098802 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536364-76q6w"] Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.098809 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-combined-ca-bundle\") pod \"2bebd962-3db8-45c9-9de7-a96a207a8b12\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.099163 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-config-data\") pod \"2bebd962-3db8-45c9-9de7-a96a207a8b12\" (UID: \"2bebd962-3db8-45c9-9de7-a96a207a8b12\") " Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.104701 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2bebd962-3db8-45c9-9de7-a96a207a8b12-kube-api-access-ghfsk" (OuterVolumeSpecName: "kube-api-access-ghfsk") pod "2bebd962-3db8-45c9-9de7-a96a207a8b12" (UID: "2bebd962-3db8-45c9-9de7-a96a207a8b12"). InnerVolumeSpecName "kube-api-access-ghfsk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.142526 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2bebd962-3db8-45c9-9de7-a96a207a8b12" (UID: "2bebd962-3db8-45c9-9de7-a96a207a8b12"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.152159 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-config-data" (OuterVolumeSpecName: "config-data") pod "2bebd962-3db8-45c9-9de7-a96a207a8b12" (UID: "2bebd962-3db8-45c9-9de7-a96a207a8b12"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.202669 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.202985 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ghfsk\" (UniqueName: \"kubernetes.io/projected/2bebd962-3db8-45c9-9de7-a96a207a8b12-kube-api-access-ghfsk\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.203211 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2bebd962-3db8-45c9-9de7-a96a207a8b12-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.566403 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fd15954e-2839-4a77-890e-16267bbb27b6" path="/var/lib/kubelet/pods/fd15954e-2839-4a77-890e-16267bbb27b6/volumes" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.707710 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-xhr9h" event={"ID":"2bebd962-3db8-45c9-9de7-a96a207a8b12","Type":"ContainerDied","Data":"2cf49dab941c998267843cde78db9df8186fa0a9dd3e90776789de09bd6b7bfd"} Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.707767 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2cf49dab941c998267843cde78db9df8186fa0a9dd3e90776789de09bd6b7bfd" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.707839 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-xhr9h" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.913157 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-rwgqh"] Feb 27 08:50:12 crc kubenswrapper[4906]: E0227 08:50:12.913758 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc216e22-3106-40fb-9f13-fddc321b8394" containerName="mariadb-account-create-update" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.913805 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc216e22-3106-40fb-9f13-fddc321b8394" containerName="mariadb-account-create-update" Feb 27 08:50:12 crc kubenswrapper[4906]: E0227 08:50:12.913834 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e0d50eb-c3dc-456b-9966-493e163facdf" containerName="mariadb-account-create-update" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.913851 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e0d50eb-c3dc-456b-9966-493e163facdf" containerName="mariadb-account-create-update" Feb 27 08:50:12 crc kubenswrapper[4906]: E0227 08:50:12.913906 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e984b25a-d687-4a9b-a92a-b5e55d37ce8f" containerName="oc" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.913921 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e984b25a-d687-4a9b-a92a-b5e55d37ce8f" containerName="oc" Feb 27 08:50:12 crc kubenswrapper[4906]: E0227 08:50:12.913945 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2bebd962-3db8-45c9-9de7-a96a207a8b12" containerName="keystone-db-sync" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.913958 4906 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="2bebd962-3db8-45c9-9de7-a96a207a8b12" containerName="keystone-db-sync" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.914256 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc216e22-3106-40fb-9f13-fddc321b8394" containerName="mariadb-account-create-update" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.914289 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="2bebd962-3db8-45c9-9de7-a96a207a8b12" containerName="keystone-db-sync" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.914314 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e0d50eb-c3dc-456b-9966-493e163facdf" containerName="mariadb-account-create-update" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.914328 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e984b25a-d687-4a9b-a92a-b5e55d37ce8f" containerName="oc" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.915850 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.929957 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-rwgqh"] Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.973803 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-m55h8"] Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.975241 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.985151 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x96zn" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.985458 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.985618 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.990661 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 27 08:50:12 crc kubenswrapper[4906]: I0227 08:50:12.998762 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.013345 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-m55h8"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.020990 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-credential-keys\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021066 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-config\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021102 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-config-data\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021140 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-fernet-keys\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021171 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-dns-svc\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021200 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021223 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k54wr\" (UniqueName: \"kubernetes.io/projected/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-kube-api-access-k54wr\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021243 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-combined-ca-bundle\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021283 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-scripts\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021312 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g77zv\" (UniqueName: \"kubernetes.io/projected/94735536-25c1-47d8-828b-fc5f71d57552-kube-api-access-g77zv\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.021327 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123360 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123436 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k54wr\" (UniqueName: \"kubernetes.io/projected/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-kube-api-access-k54wr\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123470 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-combined-ca-bundle\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123533 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-scripts\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123576 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g77zv\" (UniqueName: \"kubernetes.io/projected/94735536-25c1-47d8-828b-fc5f71d57552-kube-api-access-g77zv\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123601 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123643 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-credential-keys\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123685 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-config\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123717 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-config-data\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123770 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-fernet-keys\") 
pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.123813 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-dns-svc\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.124439 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.124957 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-dns-svc\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.125155 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.126255 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-config\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.135384 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-config-data\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.137834 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-74959df6fc-ql52b"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.139728 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.147555 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-cr2ls" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.148447 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.148475 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.148796 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.151410 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-fernet-keys\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.153631 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g77zv\" (UniqueName: \"kubernetes.io/projected/94735536-25c1-47d8-828b-fc5f71d57552-kube-api-access-g77zv\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.164126 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-credential-keys\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.165327 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k54wr\" (UniqueName: \"kubernetes.io/projected/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-kube-api-access-k54wr\") pod \"dnsmasq-dns-f877ddd87-rwgqh\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.168694 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-combined-ca-bundle\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.175697 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74959df6fc-ql52b"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.195457 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-scripts\") pod \"keystone-bootstrap-m55h8\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.227057 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58tlf\" (UniqueName: \"kubernetes.io/projected/10267773-6b24-4b86-9190-a9792b4fa7a6-kube-api-access-58tlf\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 
08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.227143 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10267773-6b24-4b86-9190-a9792b4fa7a6-logs\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.227253 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-config-data\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.227296 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/10267773-6b24-4b86-9190-a9792b4fa7a6-horizon-secret-key\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.227313 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-scripts\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.232851 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.268820 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-67z26"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.272078 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.277341 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-48hf2" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.297423 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.297828 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.306495 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.313535 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-67z26"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.329149 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-config-data\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.329245 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/10267773-6b24-4b86-9190-a9792b4fa7a6-horizon-secret-key\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.329382 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-scripts\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.329408 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xznn9\" (UniqueName: \"kubernetes.io/projected/128b6d95-fa07-4ab9-a927-47882c406fa3-kube-api-access-xznn9\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.330309 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.330369 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58tlf\" (UniqueName: \"kubernetes.io/projected/10267773-6b24-4b86-9190-a9792b4fa7a6-kube-api-access-58tlf\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.331297 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10267773-6b24-4b86-9190-a9792b4fa7a6-logs\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.331341 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-config\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.334010 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-scripts\") pod 
\"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.334505 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10267773-6b24-4b86-9190-a9792b4fa7a6-logs\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.335376 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-config-data\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.341677 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-pg9nd"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.343311 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.350706 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/10267773-6b24-4b86-9190-a9792b4fa7a6-horizon-secret-key\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.352229 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pg9nd"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.390628 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.399627 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wkdld" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.404540 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58tlf\" (UniqueName: \"kubernetes.io/projected/10267773-6b24-4b86-9190-a9792b4fa7a6-kube-api-access-58tlf\") pod \"horizon-74959df6fc-ql52b\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.404712 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.415826 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.435764 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-db-sync-config-data\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.435823 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-combined-ca-bundle\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.435845 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-scripts\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.435907 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-config-data\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.435956 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xznn9\" (UniqueName: \"kubernetes.io/projected/128b6d95-fa07-4ab9-a927-47882c406fa3-kube-api-access-xznn9\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.436031 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.436077 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/76d04662-7576-4f57-aca2-e118e5efd771-etc-machine-id\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.436115 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-config\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.436163 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lf7n6\" (UniqueName: \"kubernetes.io/projected/76d04662-7576-4f57-aca2-e118e5efd771-kube-api-access-lf7n6\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 
08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.443305 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.449941 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-config\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.495521 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-d4fhc"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.506149 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.512819 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qfclq" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.514656 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.539545 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/76d04662-7576-4f57-aca2-e118e5efd771-etc-machine-id\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.539668 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lf7n6\" (UniqueName: \"kubernetes.io/projected/76d04662-7576-4f57-aca2-e118e5efd771-kube-api-access-lf7n6\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.539710 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-db-sync-config-data\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.539732 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-combined-ca-bundle\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.539755 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-scripts\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.539790 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-config-data\") pod \"cinder-db-sync-pg9nd\" (UID: 
\"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.549288 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/76d04662-7576-4f57-aca2-e118e5efd771-etc-machine-id\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.610172 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-db-sync-config-data\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.612665 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xznn9\" (UniqueName: \"kubernetes.io/projected/128b6d95-fa07-4ab9-a927-47882c406fa3-kube-api-access-xznn9\") pod \"neutron-db-sync-67z26\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.613530 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-config-data\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.613964 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-combined-ca-bundle\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.621339 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-scripts\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.627751 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lf7n6\" (UniqueName: \"kubernetes.io/projected/76d04662-7576-4f57-aca2-e118e5efd771-kube-api-access-lf7n6\") pod \"cinder-db-sync-pg9nd\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.635705 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6959c47849-57zbw"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.637660 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.643135 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjhlp\" (UniqueName: \"kubernetes.io/projected/b36228db-b66d-4815-ac1c-e58b85ee3bbf-kube-api-access-tjhlp\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.644266 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-db-sync-config-data\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.644818 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-combined-ca-bundle\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.718594 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6959c47849-57zbw"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.725814 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-d4fhc"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749334 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xcjg\" (UniqueName: \"kubernetes.io/projected/3211e9ba-eb08-403f-9393-1804d73a18c5-kube-api-access-7xcjg\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749423 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3211e9ba-eb08-403f-9393-1804d73a18c5-horizon-secret-key\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749451 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-combined-ca-bundle\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749529 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-config-data\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749565 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3211e9ba-eb08-403f-9393-1804d73a18c5-logs\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " 
pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749603 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjhlp\" (UniqueName: \"kubernetes.io/projected/b36228db-b66d-4815-ac1c-e58b85ee3bbf-kube-api-access-tjhlp\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749641 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-db-sync-config-data\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.749660 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-scripts\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.765944 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-67z26" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.776835 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-db-sync-config-data\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.785819 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-combined-ca-bundle\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.786865 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-rwgqh"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.796585 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.797389 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjhlp\" (UniqueName: \"kubernetes.io/projected/b36228db-b66d-4815-ac1c-e58b85ee3bbf-kube-api-access-tjhlp\") pod \"barbican-db-sync-d4fhc\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.797474 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.804327 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.808321 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.808500 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.809432 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-jjnnc"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.822057 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.829637 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.829902 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.830050 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-78pgm" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.830601 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.851093 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jjnnc"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.853552 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-config-data\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.863573 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.873347 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3211e9ba-eb08-403f-9393-1804d73a18c5-logs\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.873453 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.873636 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.873814 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-log-httpd\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.873920 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-scripts\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.874007 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-scripts\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.874114 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xcjg\" (UniqueName: \"kubernetes.io/projected/3211e9ba-eb08-403f-9393-1804d73a18c5-kube-api-access-7xcjg\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.874184 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-run-httpd\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.874291 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-config-data\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.874435 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-ldhgk\" (UniqueName: \"kubernetes.io/projected/30498343-6254-49c2-8220-9df92217cb8f-kube-api-access-ldhgk\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.874603 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3211e9ba-eb08-403f-9393-1804d73a18c5-horizon-secret-key\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.865871 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-config-data\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.878231 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-89b8b"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.879736 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.910285 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-scripts\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.910373 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-89b8b"] Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.913170 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3211e9ba-eb08-403f-9393-1804d73a18c5-logs\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.913678 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3211e9ba-eb08-403f-9393-1804d73a18c5-horizon-secret-key\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.941130 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xcjg\" (UniqueName: \"kubernetes.io/projected/3211e9ba-eb08-403f-9393-1804d73a18c5-kube-api-access-7xcjg\") pod \"horizon-6959c47849-57zbw\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.980307 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.985613 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-config-data\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.990476 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-run-httpd\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.990946 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-config-data\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.991167 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-run-httpd\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.991178 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s99hf\" (UniqueName: \"kubernetes.io/projected/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-kube-api-access-s99hf\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.991307 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldhgk\" (UniqueName: \"kubernetes.io/projected/30498343-6254-49c2-8220-9df92217cb8f-kube-api-access-ldhgk\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.991375 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.991552 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.991655 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-config\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.991731 4906 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9z95\" (UniqueName: \"kubernetes.io/projected/afee5cf7-dd4d-490f-be62-3fbea3170858-kube-api-access-x9z95\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992163 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-scripts\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992374 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992419 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-logs\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992457 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992594 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-log-httpd\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992656 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-combined-ca-bundle\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992700 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.992727 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-scripts\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.995296 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-log-httpd\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.997421 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.998041 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-scripts\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:13 crc kubenswrapper[4906]: I0227 08:50:13.999641 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.001865 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-config-data\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.019837 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldhgk\" (UniqueName: \"kubernetes.io/projected/30498343-6254-49c2-8220-9df92217cb8f-kube-api-access-ldhgk\") pod \"ceilometer-0\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " pod="openstack/ceilometer-0" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096093 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-logs\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096568 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-combined-ca-bundle\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096590 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096619 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-config-data\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096665 4906 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-s99hf\" (UniqueName: \"kubernetes.io/projected/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-kube-api-access-s99hf\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096692 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096722 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096751 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-config\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096772 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9z95\" (UniqueName: \"kubernetes.io/projected/afee5cf7-dd4d-490f-be62-3fbea3170858-kube-api-access-x9z95\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.096804 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-scripts\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.100360 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-logs\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.100850 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.101356 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.101773 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-config\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: 
\"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.102225 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.103380 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-scripts\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.104993 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-config-data\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.108184 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-combined-ca-bundle\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.121048 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s99hf\" (UniqueName: \"kubernetes.io/projected/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-kube-api-access-s99hf\") pod \"placement-db-sync-jjnnc\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.124113 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9z95\" (UniqueName: \"kubernetes.io/projected/afee5cf7-dd4d-490f-be62-3fbea3170858-kube-api-access-x9z95\") pod \"dnsmasq-dns-68dcc9cf6f-89b8b\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.164953 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.181756 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jjnnc" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.247491 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.308820 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-rwgqh"] Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.539641 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-m55h8"] Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.586008 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-74959df6fc-ql52b"] Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.684657 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-67z26"] Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.733833 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:50:14 crc kubenswrapper[4906]: E0227 08:50:14.734061 4906 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Feb 27 08:50:14 crc kubenswrapper[4906]: E0227 08:50:14.734081 4906 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Feb 27 08:50:14 crc kubenswrapper[4906]: E0227 08:50:14.734132 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift podName:c98486bd-1325-4072-bce0-a28d38ecead2 nodeName:}" failed. No retries permitted until 2026-02-27 08:51:18.734115902 +0000 UTC m=+1377.128517512 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift") pod "swift-storage-0" (UID: "c98486bd-1325-4072-bce0-a28d38ecead2") : configmap "swift-ring-files" not found Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.772761 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-67z26" event={"ID":"128b6d95-fa07-4ab9-a927-47882c406fa3","Type":"ContainerStarted","Data":"0f60cfa9b7bdaa8e45bdaa9ea57a67691297bf9b4fd9de2da695e56363369eb8"} Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.774634 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m55h8" event={"ID":"94735536-25c1-47d8-828b-fc5f71d57552","Type":"ContainerStarted","Data":"f7ac5e4ead473802f6c67d23cc49ed395f2a4ded5b9736d3897f0151fff46158"} Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.776983 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74959df6fc-ql52b" event={"ID":"10267773-6b24-4b86-9190-a9792b4fa7a6","Type":"ContainerStarted","Data":"3ed515c7a0bb18f9eb95b9e0b5536422494c07ffa41230c560e626e22bd3a961"} Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.780514 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" event={"ID":"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6","Type":"ContainerStarted","Data":"5f150a02dca1dec7d4548d521c99e5cd462bf48d9d338f1b692332954d0b5cf5"} Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.879108 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-d4fhc"] Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.889228 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6959c47849-57zbw"] Feb 27 08:50:14 crc kubenswrapper[4906]: I0227 08:50:14.916353 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pg9nd"] Feb 27 08:50:14 crc kubenswrapper[4906]: W0227 08:50:14.948405 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76d04662_7576_4f57_aca2_e118e5efd771.slice/crio-39c72e0b511ebd5e160b28d4b5c22a3502e4251faa85d0a1b17c0494a32a9786 WatchSource:0}: Error finding container 39c72e0b511ebd5e160b28d4b5c22a3502e4251faa85d0a1b17c0494a32a9786: Status 404 returned error can't find the container with id 39c72e0b511ebd5e160b28d4b5c22a3502e4251faa85d0a1b17c0494a32a9786 Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.058243 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6959c47849-57zbw"] Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.119348 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-84bbf4f557-nfg4z"] Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.123761 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.143584 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84bbf4f557-nfg4z"] Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.152248 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-jjnnc"] Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.188900 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-89b8b"] Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.214413 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.254363 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.270069 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-config-data\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.270129 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7278414-6040-46e3-ae8e-98e75e0b73ad-horizon-secret-key\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.270231 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qnhvl\" (UniqueName: \"kubernetes.io/projected/d7278414-6040-46e3-ae8e-98e75e0b73ad-kube-api-access-qnhvl\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.270325 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7278414-6040-46e3-ae8e-98e75e0b73ad-logs\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.270409 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-scripts\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.373338 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-config-data\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.373416 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7278414-6040-46e3-ae8e-98e75e0b73ad-horizon-secret-key\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " 
pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.373535 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qnhvl\" (UniqueName: \"kubernetes.io/projected/d7278414-6040-46e3-ae8e-98e75e0b73ad-kube-api-access-qnhvl\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.373580 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7278414-6040-46e3-ae8e-98e75e0b73ad-logs\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.373690 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-scripts\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.374610 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7278414-6040-46e3-ae8e-98e75e0b73ad-logs\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.374747 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-scripts\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.375156 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-config-data\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.383786 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7278414-6040-46e3-ae8e-98e75e0b73ad-horizon-secret-key\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.399563 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qnhvl\" (UniqueName: \"kubernetes.io/projected/d7278414-6040-46e3-ae8e-98e75e0b73ad-kube-api-access-qnhvl\") pod \"horizon-84bbf4f557-nfg4z\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.452379 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.792468 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnnc" event={"ID":"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7","Type":"ContainerStarted","Data":"3e3824cce68035fcd76a3166b79806f0ac6797bcc008e853d1b255218734cf47"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.795873 4906 generic.go:334] "Generic (PLEG): container finished" podID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerID="ac3501342ae7773461217efa9a338fc2231373b04af029dda0a99439df4a2c51" exitCode=0 Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.795978 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" event={"ID":"afee5cf7-dd4d-490f-be62-3fbea3170858","Type":"ContainerDied","Data":"ac3501342ae7773461217efa9a338fc2231373b04af029dda0a99439df4a2c51"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.796506 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" event={"ID":"afee5cf7-dd4d-490f-be62-3fbea3170858","Type":"ContainerStarted","Data":"ce16fc3b91c4b5ecb23564b9519ac082173980b1e9dc9ed756b246094a4e1a88"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.803647 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-67z26" event={"ID":"128b6d95-fa07-4ab9-a927-47882c406fa3","Type":"ContainerStarted","Data":"ff74f8c1c239517610ae82fe9c54e8f2cdc13655273903ddb470f0835bec89f0"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.809685 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pg9nd" event={"ID":"76d04662-7576-4f57-aca2-e118e5efd771","Type":"ContainerStarted","Data":"39c72e0b511ebd5e160b28d4b5c22a3502e4251faa85d0a1b17c0494a32a9786"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.838064 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m55h8" event={"ID":"94735536-25c1-47d8-828b-fc5f71d57552","Type":"ContainerStarted","Data":"30ea85af047d20481ff3464d917fb9df1bef54d6d15e94b1e03be61c3be6f469"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.840048 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30498343-6254-49c2-8220-9df92217cb8f","Type":"ContainerStarted","Data":"71d11135b35f0718165afa91c8a2ba7b84e21ffd73415381c3a8dcdfaa174790"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.844845 4906 generic.go:334] "Generic (PLEG): container finished" podID="78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" containerID="86660614103fa0cb3503c86a1c0a4dc6a6181d4effbc95094b8ae04da642c70e" exitCode=0 Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.844944 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" event={"ID":"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6","Type":"ContainerDied","Data":"86660614103fa0cb3503c86a1c0a4dc6a6181d4effbc95094b8ae04da642c70e"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.865646 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-67z26" podStartSLOduration=2.865618851 podStartE2EDuration="2.865618851s" podCreationTimestamp="2026-02-27 08:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:50:15.861508513 +0000 UTC m=+1314.255910143" 
watchObservedRunningTime="2026-02-27 08:50:15.865618851 +0000 UTC m=+1314.260020461" Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.872226 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6959c47849-57zbw" event={"ID":"3211e9ba-eb08-403f-9393-1804d73a18c5","Type":"ContainerStarted","Data":"07b0ac3b52e3f39abe7651f08044e01c1f9ef25658784c4648ef83c595f66a57"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.893909 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-d4fhc" event={"ID":"b36228db-b66d-4815-ac1c-e58b85ee3bbf","Type":"ContainerStarted","Data":"8e1e49708351d22691bae6eb8e86b348618839dadb3cf168f6df8a05f6fd12b0"} Feb 27 08:50:15 crc kubenswrapper[4906]: I0227 08:50:15.894983 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-m55h8" podStartSLOduration=3.894967881 podStartE2EDuration="3.894967881s" podCreationTimestamp="2026-02-27 08:50:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:50:15.884724452 +0000 UTC m=+1314.279126062" watchObservedRunningTime="2026-02-27 08:50:15.894967881 +0000 UTC m=+1314.289369491" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.002227 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84bbf4f557-nfg4z"] Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.333002 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.408897 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-nb\") pod \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.408976 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-config\") pod \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.409034 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-sb\") pod \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.409141 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k54wr\" (UniqueName: \"kubernetes.io/projected/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-kube-api-access-k54wr\") pod \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.409189 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-dns-svc\") pod \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\" (UID: \"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6\") " Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.447162 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-kube-api-access-k54wr" (OuterVolumeSpecName: "kube-api-access-k54wr") pod "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" (UID: "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6"). InnerVolumeSpecName "kube-api-access-k54wr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.448425 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" (UID: "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.448939 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" (UID: "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.450025 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" (UID: "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.493915 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-config" (OuterVolumeSpecName: "config") pod "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" (UID: "78c1c63c-bd8c-4cb4-9992-ec2b33e743f6"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.511604 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k54wr\" (UniqueName: \"kubernetes.io/projected/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-kube-api-access-k54wr\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.511650 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.511663 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.511674 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.511686 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.909156 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" event={"ID":"afee5cf7-dd4d-490f-be62-3fbea3170858","Type":"ContainerStarted","Data":"bafe0dcaa7a2dc47acf5bf79d98ab0d7f301672314f840a0777763c8a01dc4ec"} Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.909560 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.917323 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" event={"ID":"78c1c63c-bd8c-4cb4-9992-ec2b33e743f6","Type":"ContainerDied","Data":"5f150a02dca1dec7d4548d521c99e5cd462bf48d9d338f1b692332954d0b5cf5"} Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.917365 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-rwgqh" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.917396 4906 scope.go:117] "RemoveContainer" containerID="86660614103fa0cb3503c86a1c0a4dc6a6181d4effbc95094b8ae04da642c70e" Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.920992 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bbf4f557-nfg4z" event={"ID":"d7278414-6040-46e3-ae8e-98e75e0b73ad","Type":"ContainerStarted","Data":"96d64d621d375b3c0dec709d0d12b0fc69b6b2164290610018d4dbc0d449a8db"} Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.924125 4906 generic.go:334] "Generic (PLEG): container finished" podID="db586541-2471-4a37-a7b6-3c8f324a696b" containerID="436ee7e9f47d89512517e01527c611a34144233f54e765ce6e2320b293ad4074" exitCode=0 Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.924151 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-6jk9r" event={"ID":"db586541-2471-4a37-a7b6-3c8f324a696b","Type":"ContainerDied","Data":"436ee7e9f47d89512517e01527c611a34144233f54e765ce6e2320b293ad4074"} Feb 27 08:50:16 crc kubenswrapper[4906]: I0227 08:50:16.954983 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" podStartSLOduration=3.954955655 podStartE2EDuration="3.954955655s" podCreationTimestamp="2026-02-27 08:50:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:50:16.93797894 +0000 UTC m=+1315.332380550" watchObservedRunningTime="2026-02-27 08:50:16.954955655 +0000 UTC m=+1315.349357265" Feb 27 08:50:17 crc kubenswrapper[4906]: I0227 08:50:17.019379 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-rwgqh"] Feb 27 08:50:17 crc kubenswrapper[4906]: I0227 08:50:17.030570 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-rwgqh"] Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.407620 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.478004 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-ring-data-devices\") pod \"db586541-2471-4a37-a7b6-3c8f324a696b\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.478067 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-swiftconf\") pod \"db586541-2471-4a37-a7b6-3c8f324a696b\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.478252 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-combined-ca-bundle\") pod \"db586541-2471-4a37-a7b6-3c8f324a696b\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.478307 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-dispersionconf\") pod \"db586541-2471-4a37-a7b6-3c8f324a696b\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.478362 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db586541-2471-4a37-a7b6-3c8f324a696b-etc-swift\") pod \"db586541-2471-4a37-a7b6-3c8f324a696b\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.478448 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hpk8\" (UniqueName: \"kubernetes.io/projected/db586541-2471-4a37-a7b6-3c8f324a696b-kube-api-access-5hpk8\") pod \"db586541-2471-4a37-a7b6-3c8f324a696b\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.478488 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-scripts\") pod \"db586541-2471-4a37-a7b6-3c8f324a696b\" (UID: \"db586541-2471-4a37-a7b6-3c8f324a696b\") " Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.479296 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "db586541-2471-4a37-a7b6-3c8f324a696b" (UID: "db586541-2471-4a37-a7b6-3c8f324a696b"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.479896 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db586541-2471-4a37-a7b6-3c8f324a696b-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "db586541-2471-4a37-a7b6-3c8f324a696b" (UID: "db586541-2471-4a37-a7b6-3c8f324a696b"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.496112 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "db586541-2471-4a37-a7b6-3c8f324a696b" (UID: "db586541-2471-4a37-a7b6-3c8f324a696b"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.501186 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db586541-2471-4a37-a7b6-3c8f324a696b-kube-api-access-5hpk8" (OuterVolumeSpecName: "kube-api-access-5hpk8") pod "db586541-2471-4a37-a7b6-3c8f324a696b" (UID: "db586541-2471-4a37-a7b6-3c8f324a696b"). InnerVolumeSpecName "kube-api-access-5hpk8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.516361 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "db586541-2471-4a37-a7b6-3c8f324a696b" (UID: "db586541-2471-4a37-a7b6-3c8f324a696b"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.524671 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-scripts" (OuterVolumeSpecName: "scripts") pod "db586541-2471-4a37-a7b6-3c8f324a696b" (UID: "db586541-2471-4a37-a7b6-3c8f324a696b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.539671 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "db586541-2471-4a37-a7b6-3c8f324a696b" (UID: "db586541-2471-4a37-a7b6-3c8f324a696b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.567002 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" path="/var/lib/kubelet/pods/78c1c63c-bd8c-4cb4-9992-ec2b33e743f6/volumes" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.581285 4906 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-dispersionconf\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.581329 4906 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/db586541-2471-4a37-a7b6-3c8f324a696b-etc-swift\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.581340 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hpk8\" (UniqueName: \"kubernetes.io/projected/db586541-2471-4a37-a7b6-3c8f324a696b-kube-api-access-5hpk8\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.581353 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.581364 4906 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/db586541-2471-4a37-a7b6-3c8f324a696b-ring-data-devices\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.581373 4906 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-swiftconf\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.581385 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db586541-2471-4a37-a7b6-3c8f324a696b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.966905 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-6jk9r" event={"ID":"db586541-2471-4a37-a7b6-3c8f324a696b","Type":"ContainerDied","Data":"4167f53294c9406f0bf868cfedf142ed259d77488664892db3868186655908ef"} Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.966950 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4167f53294c9406f0bf868cfedf142ed259d77488664892db3868186655908ef" Feb 27 08:50:18 crc kubenswrapper[4906]: I0227 08:50:18.967015 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-6jk9r" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.835692 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74959df6fc-ql52b"] Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.863641 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-98c78d5f8-j9wmn"] Feb 27 08:50:21 crc kubenswrapper[4906]: E0227 08:50:21.864189 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db586541-2471-4a37-a7b6-3c8f324a696b" containerName="swift-ring-rebalance" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.864217 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="db586541-2471-4a37-a7b6-3c8f324a696b" containerName="swift-ring-rebalance" Feb 27 08:50:21 crc kubenswrapper[4906]: E0227 08:50:21.864410 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" containerName="init" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.864424 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" containerName="init" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.864586 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="78c1c63c-bd8c-4cb4-9992-ec2b33e743f6" containerName="init" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.864613 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="db586541-2471-4a37-a7b6-3c8f324a696b" containerName="swift-ring-rebalance" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.865537 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.872463 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.901995 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-98c78d5f8-j9wmn"] Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.936668 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84bbf4f557-nfg4z"] Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.961778 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-scripts\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.961835 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-secret-key\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.961862 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbzfz\" (UniqueName: \"kubernetes.io/projected/e6ab1c10-b552-4a69-94c7-68280ab7e126-kube-api-access-vbzfz\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.961960 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-tls-certs\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.962006 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-config-data\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.962023 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-combined-ca-bundle\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.962057 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6ab1c10-b552-4a69-94c7-68280ab7e126-logs\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.967396 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7f78987f9b-lzmw8"] Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.975798 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:21 crc kubenswrapper[4906]: I0227 08:50:21.978496 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f78987f9b-lzmw8"] Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065050 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld5ch\" (UniqueName: \"kubernetes.io/projected/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-kube-api-access-ld5ch\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065126 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-logs\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065196 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-combined-ca-bundle\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065274 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-tls-certs\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " 
pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065324 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-scripts\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065401 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-config-data\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065418 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-combined-ca-bundle\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.065653 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6ab1c10-b552-4a69-94c7-68280ab7e126-logs\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.066155 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6ab1c10-b552-4a69-94c7-68280ab7e126-logs\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.067116 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-config-data\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.067172 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-horizon-tls-certs\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.067241 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-config-data\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.067294 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-horizon-secret-key\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.067341 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-scripts\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.067365 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-secret-key\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.067389 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbzfz\" (UniqueName: \"kubernetes.io/projected/e6ab1c10-b552-4a69-94c7-68280ab7e126-kube-api-access-vbzfz\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.068144 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-scripts\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.072953 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-secret-key\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.073024 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-tls-certs\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.073626 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-combined-ca-bundle\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.087342 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbzfz\" (UniqueName: \"kubernetes.io/projected/e6ab1c10-b552-4a69-94c7-68280ab7e126-kube-api-access-vbzfz\") pod \"horizon-98c78d5f8-j9wmn\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.169963 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-horizon-tls-certs\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.170021 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-config-data\") pod 
\"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.170058 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-horizon-secret-key\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.170141 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld5ch\" (UniqueName: \"kubernetes.io/projected/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-kube-api-access-ld5ch\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.170168 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-logs\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.170210 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-combined-ca-bundle\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.170241 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-scripts\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.171484 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-scripts\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.172740 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-logs\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.173322 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-config-data\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.175519 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-horizon-tls-certs\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.176446 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-combined-ca-bundle\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.176603 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-horizon-secret-key\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.198413 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.235470 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld5ch\" (UniqueName: \"kubernetes.io/projected/6dc6534b-d5ec-4c53-bfc1-aae2389e3755-kube-api-access-ld5ch\") pod \"horizon-7f78987f9b-lzmw8\" (UID: \"6dc6534b-d5ec-4c53-bfc1-aae2389e3755\") " pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.306845 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:50:22 crc kubenswrapper[4906]: W0227 08:50:22.700509 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode6ab1c10_b552_4a69_94c7_68280ab7e126.slice/crio-4642c6624d1f0b126e2ec3628e2760819a678733aece46dc44970c4450c747df WatchSource:0}: Error finding container 4642c6624d1f0b126e2ec3628e2760819a678733aece46dc44970c4450c747df: Status 404 returned error can't find the container with id 4642c6624d1f0b126e2ec3628e2760819a678733aece46dc44970c4450c747df Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.706814 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-98c78d5f8-j9wmn"] Feb 27 08:50:22 crc kubenswrapper[4906]: I0227 08:50:22.843005 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7f78987f9b-lzmw8"] Feb 27 08:50:23 crc kubenswrapper[4906]: I0227 08:50:23.014784 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-98c78d5f8-j9wmn" event={"ID":"e6ab1c10-b552-4a69-94c7-68280ab7e126","Type":"ContainerStarted","Data":"4642c6624d1f0b126e2ec3628e2760819a678733aece46dc44970c4450c747df"} Feb 27 08:50:23 crc kubenswrapper[4906]: I0227 08:50:23.016316 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f78987f9b-lzmw8" event={"ID":"6dc6534b-d5ec-4c53-bfc1-aae2389e3755","Type":"ContainerStarted","Data":"d23f3b652537f5b48ac76f2e77823b209fa5f8dd789ea8fe270ccc737ca5dbb6"} Feb 27 08:50:24 crc kubenswrapper[4906]: I0227 08:50:24.250196 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:50:24 crc kubenswrapper[4906]: I0227 08:50:24.343616 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-jdnzx"] Feb 27 08:50:24 crc kubenswrapper[4906]: I0227 08:50:24.343904 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-jdnzx" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="dnsmasq-dns" 
containerID="cri-o://ffbe1d16da29543e957d74d620229ed94f6508e5e376f5472869610979f02ab3" gracePeriod=10 Feb 27 08:50:24 crc kubenswrapper[4906]: I0227 08:50:24.843471 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-jdnzx" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: connect: connection refused" Feb 27 08:50:24 crc kubenswrapper[4906]: I0227 08:50:24.844089 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:50:24 crc kubenswrapper[4906]: I0227 08:50:24.844173 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:50:26 crc kubenswrapper[4906]: I0227 08:50:26.049411 4906 generic.go:334] "Generic (PLEG): container finished" podID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerID="ffbe1d16da29543e957d74d620229ed94f6508e5e376f5472869610979f02ab3" exitCode=0 Feb 27 08:50:26 crc kubenswrapper[4906]: I0227 08:50:26.049572 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-jdnzx" event={"ID":"a0cd4824-5d93-46a6-bb94-95a74d8eaeba","Type":"ContainerDied","Data":"ffbe1d16da29543e957d74d620229ed94f6508e5e376f5472869610979f02ab3"} Feb 27 08:50:29 crc kubenswrapper[4906]: I0227 08:50:29.088110 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m55h8" event={"ID":"94735536-25c1-47d8-828b-fc5f71d57552","Type":"ContainerDied","Data":"30ea85af047d20481ff3464d917fb9df1bef54d6d15e94b1e03be61c3be6f469"} Feb 27 08:50:29 crc kubenswrapper[4906]: I0227 08:50:29.088228 4906 generic.go:334] "Generic (PLEG): container finished" podID="94735536-25c1-47d8-828b-fc5f71d57552" containerID="30ea85af047d20481ff3464d917fb9df1bef54d6d15e94b1e03be61c3be6f469" exitCode=0 Feb 27 08:50:34 crc kubenswrapper[4906]: E0227 08:50:34.429363 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 27 08:50:34 crc kubenswrapper[4906]: E0227 08:50:34.430559 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n57bh5dbhf4hcdh567h59ch567hd8hbfh665h547h7ch74h56fhf5h88h5b6h586h665h9dh66fh9ch648h648h655h65fh57h648h598h97h5fbh68cq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qnhvl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-84bbf4f557-nfg4z_openstack(d7278414-6040-46e3-ae8e-98e75e0b73ad): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:34 crc kubenswrapper[4906]: E0227 08:50:34.434201 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-84bbf4f557-nfg4z" podUID="d7278414-6040-46e3-ae8e-98e75e0b73ad" Feb 27 08:50:34 crc kubenswrapper[4906]: E0227 08:50:34.445681 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 27 08:50:34 crc kubenswrapper[4906]: E0227 08:50:34.446052 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5cch655hc9h5bfh64bhf4h5c5h64dhcdh594h5f9h689h5b9h5cfh7dhf7h79h647h677h669h6ch67h99h5bbh9bh85h55fhfhddh664h99h667q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7xcjg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-6959c47849-57zbw_openstack(3211e9ba-eb08-403f-9393-1804d73a18c5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:34 crc kubenswrapper[4906]: E0227 08:50:34.449675 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6959c47849-57zbw" podUID="3211e9ba-eb08-403f-9393-1804d73a18c5" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.512496 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.541818 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-sb\") pod \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.541912 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-nb\") pod \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.541959 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-dns-svc\") pod \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.542080 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-config\") pod \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.542109 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk762\" (UniqueName: \"kubernetes.io/projected/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-kube-api-access-mk762\") pod \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\" (UID: \"a0cd4824-5d93-46a6-bb94-95a74d8eaeba\") " Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.559630 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-kube-api-access-mk762" (OuterVolumeSpecName: "kube-api-access-mk762") pod "a0cd4824-5d93-46a6-bb94-95a74d8eaeba" (UID: "a0cd4824-5d93-46a6-bb94-95a74d8eaeba"). InnerVolumeSpecName "kube-api-access-mk762". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.593939 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-config" (OuterVolumeSpecName: "config") pod "a0cd4824-5d93-46a6-bb94-95a74d8eaeba" (UID: "a0cd4824-5d93-46a6-bb94-95a74d8eaeba"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.600626 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a0cd4824-5d93-46a6-bb94-95a74d8eaeba" (UID: "a0cd4824-5d93-46a6-bb94-95a74d8eaeba"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.611522 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a0cd4824-5d93-46a6-bb94-95a74d8eaeba" (UID: "a0cd4824-5d93-46a6-bb94-95a74d8eaeba"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.624337 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a0cd4824-5d93-46a6-bb94-95a74d8eaeba" (UID: "a0cd4824-5d93-46a6-bb94-95a74d8eaeba"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.644426 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.644475 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.644506 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.644517 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk762\" (UniqueName: \"kubernetes.io/projected/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-kube-api-access-mk762\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.644531 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a0cd4824-5d93-46a6-bb94-95a74d8eaeba-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:34 crc kubenswrapper[4906]: I0227 08:50:34.840631 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-jdnzx" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.123:5353: i/o timeout" Feb 27 08:50:35 crc kubenswrapper[4906]: I0227 08:50:35.148375 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-jdnzx" event={"ID":"a0cd4824-5d93-46a6-bb94-95a74d8eaeba","Type":"ContainerDied","Data":"ee35b25216807ae78c06b997d40adda8382b4f8358bc509838aeeee63f129d0b"} Feb 27 08:50:35 crc kubenswrapper[4906]: I0227 08:50:35.148499 4906 scope.go:117] "RemoveContainer" containerID="ffbe1d16da29543e957d74d620229ed94f6508e5e376f5472869610979f02ab3" Feb 27 08:50:35 crc kubenswrapper[4906]: I0227 08:50:35.148493 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-jdnzx" Feb 27 08:50:35 crc kubenswrapper[4906]: I0227 08:50:35.220986 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-jdnzx"] Feb 27 08:50:35 crc kubenswrapper[4906]: I0227 08:50:35.228961 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-jdnzx"] Feb 27 08:50:36 crc kubenswrapper[4906]: I0227 08:50:36.564839 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" path="/var/lib/kubelet/pods/a0cd4824-5d93-46a6-bb94-95a74d8eaeba/volumes" Feb 27 08:50:41 crc kubenswrapper[4906]: E0227 08:50:41.292603 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified" Feb 27 08:50:41 crc kubenswrapper[4906]: E0227 08:50:41.293688 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-s99hf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-jjnnc_openstack(3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:41 crc kubenswrapper[4906]: E0227 08:50:41.294953 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code 
= Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-jjnnc" podUID="3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" Feb 27 08:50:42 crc kubenswrapper[4906]: E0227 08:50:42.234594 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-jjnnc" podUID="3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" Feb 27 08:50:46 crc kubenswrapper[4906]: I0227 08:50:46.799338 4906 scope.go:117] "RemoveContainer" containerID="39126b2860e040002b9da8ecaa22519f453ac1405289cde078ae26f470ef55fb" Feb 27 08:50:48 crc kubenswrapper[4906]: E0227 08:50:48.907296 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 27 08:50:48 crc kubenswrapper[4906]: E0227 08:50:48.908069 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n54h57h5b5h7dhbch659h97h6dh68bh9fh5b8h68fh5f9hf5h77h558h559h564h5dch556h68ch7dhf6h66bh589h68bh66bh89h685h577h65bh5c6q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-58tlf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-74959df6fc-ql52b_openstack(10267773-6b24-4b86-9190-a9792b4fa7a6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:48 crc kubenswrapper[4906]: E0227 08:50:48.911072 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling 
image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-74959df6fc-ql52b" podUID="10267773-6b24-4b86-9190-a9792b4fa7a6" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.313535 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.313941 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nb4h5cch5b7h9fh594h588h677h679h94h8dh56h96h8dh555h574h5c4hdh54h75h5c4h87h67dh56dh84h577h74h66bh54dh54dh77h66fh5bdq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ld5ch,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7f78987f9b-lzmw8_openstack(6dc6534b-d5ec-4c53-bfc1-aae2389e3755): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.319865 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.320478 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db 
upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tjhlp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-d4fhc_openstack(b36228db-b66d-4815-ac1c-e58b85ee3bbf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.321765 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-d4fhc" podUID="b36228db-b66d-4815-ac1c-e58b85ee3bbf" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.328441 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.328684 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n564h5d8h649hdhc5h65bh77h56bh684hdh76h67fh5bfhf9h56ch5dfh84hc5hc7h5dbh7h5c4h5d5h78h5f7h566h687h575h647h54h57ch65cq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vbzfz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-98c78d5f8-j9wmn_openstack(e6ab1c10-b552-4a69-94c7-68280ab7e126): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.335644 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7f78987f9b-lzmw8" podUID="6dc6534b-d5ec-4c53-bfc1-aae2389e3755" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.341005 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.390901 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.400403 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.419768 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.495130 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-scripts\") pod \"94735536-25c1-47d8-828b-fc5f71d57552\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.495217 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g77zv\" (UniqueName: \"kubernetes.io/projected/94735536-25c1-47d8-828b-fc5f71d57552-kube-api-access-g77zv\") pod \"94735536-25c1-47d8-828b-fc5f71d57552\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.495281 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-config-data\") pod \"94735536-25c1-47d8-828b-fc5f71d57552\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.495349 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-fernet-keys\") pod \"94735536-25c1-47d8-828b-fc5f71d57552\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.495374 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-combined-ca-bundle\") pod \"94735536-25c1-47d8-828b-fc5f71d57552\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.495432 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-credential-keys\") pod \"94735536-25c1-47d8-828b-fc5f71d57552\" (UID: \"94735536-25c1-47d8-828b-fc5f71d57552\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.505286 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "94735536-25c1-47d8-828b-fc5f71d57552" (UID: "94735536-25c1-47d8-828b-fc5f71d57552"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.505398 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-scripts" (OuterVolumeSpecName: "scripts") pod "94735536-25c1-47d8-828b-fc5f71d57552" (UID: "94735536-25c1-47d8-828b-fc5f71d57552"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.505573 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94735536-25c1-47d8-828b-fc5f71d57552-kube-api-access-g77zv" (OuterVolumeSpecName: "kube-api-access-g77zv") pod "94735536-25c1-47d8-828b-fc5f71d57552" (UID: "94735536-25c1-47d8-828b-fc5f71d57552"). InnerVolumeSpecName "kube-api-access-g77zv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.507515 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "94735536-25c1-47d8-828b-fc5f71d57552" (UID: "94735536-25c1-47d8-828b-fc5f71d57552"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.570302 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-config-data" (OuterVolumeSpecName: "config-data") pod "94735536-25c1-47d8-828b-fc5f71d57552" (UID: "94735536-25c1-47d8-828b-fc5f71d57552"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.581370 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "94735536-25c1-47d8-828b-fc5f71d57552" (UID: "94735536-25c1-47d8-828b-fc5f71d57552"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598122 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-scripts\") pod \"d7278414-6040-46e3-ae8e-98e75e0b73ad\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598190 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7278414-6040-46e3-ae8e-98e75e0b73ad-horizon-secret-key\") pod \"d7278414-6040-46e3-ae8e-98e75e0b73ad\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598269 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3211e9ba-eb08-403f-9393-1804d73a18c5-horizon-secret-key\") pod \"3211e9ba-eb08-403f-9393-1804d73a18c5\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598383 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-config-data\") pod \"d7278414-6040-46e3-ae8e-98e75e0b73ad\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598435 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3211e9ba-eb08-403f-9393-1804d73a18c5-logs\") pod \"3211e9ba-eb08-403f-9393-1804d73a18c5\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598505 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qnhvl\" (UniqueName: \"kubernetes.io/projected/d7278414-6040-46e3-ae8e-98e75e0b73ad-kube-api-access-qnhvl\") pod \"d7278414-6040-46e3-ae8e-98e75e0b73ad\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598586 4906 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7278414-6040-46e3-ae8e-98e75e0b73ad-logs\") pod \"d7278414-6040-46e3-ae8e-98e75e0b73ad\" (UID: \"d7278414-6040-46e3-ae8e-98e75e0b73ad\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598703 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-config-data\") pod \"3211e9ba-eb08-403f-9393-1804d73a18c5\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598866 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-scripts\") pod \"3211e9ba-eb08-403f-9393-1804d73a18c5\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.598943 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xcjg\" (UniqueName: \"kubernetes.io/projected/3211e9ba-eb08-403f-9393-1804d73a18c5-kube-api-access-7xcjg\") pod \"3211e9ba-eb08-403f-9393-1804d73a18c5\" (UID: \"3211e9ba-eb08-403f-9393-1804d73a18c5\") " Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.599543 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g77zv\" (UniqueName: \"kubernetes.io/projected/94735536-25c1-47d8-828b-fc5f71d57552-kube-api-access-g77zv\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.599569 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.599581 4906 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.599581 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3211e9ba-eb08-403f-9393-1804d73a18c5-logs" (OuterVolumeSpecName: "logs") pod "3211e9ba-eb08-403f-9393-1804d73a18c5" (UID: "3211e9ba-eb08-403f-9393-1804d73a18c5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.600030 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-scripts" (OuterVolumeSpecName: "scripts") pod "d7278414-6040-46e3-ae8e-98e75e0b73ad" (UID: "d7278414-6040-46e3-ae8e-98e75e0b73ad"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.601695 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d7278414-6040-46e3-ae8e-98e75e0b73ad-logs" (OuterVolumeSpecName: "logs") pod "d7278414-6040-46e3-ae8e-98e75e0b73ad" (UID: "d7278414-6040-46e3-ae8e-98e75e0b73ad"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.602785 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-config-data" (OuterVolumeSpecName: "config-data") pod "3211e9ba-eb08-403f-9393-1804d73a18c5" (UID: "3211e9ba-eb08-403f-9393-1804d73a18c5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.602785 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-config-data" (OuterVolumeSpecName: "config-data") pod "d7278414-6040-46e3-ae8e-98e75e0b73ad" (UID: "d7278414-6040-46e3-ae8e-98e75e0b73ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.603572 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d7278414-6040-46e3-ae8e-98e75e0b73ad-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "d7278414-6040-46e3-ae8e-98e75e0b73ad" (UID: "d7278414-6040-46e3-ae8e-98e75e0b73ad"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.603709 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-scripts" (OuterVolumeSpecName: "scripts") pod "3211e9ba-eb08-403f-9393-1804d73a18c5" (UID: "3211e9ba-eb08-403f-9393-1804d73a18c5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.599593 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.604846 4906 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.604860 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94735536-25c1-47d8-828b-fc5f71d57552-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.606440 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7278414-6040-46e3-ae8e-98e75e0b73ad-kube-api-access-qnhvl" (OuterVolumeSpecName: "kube-api-access-qnhvl") pod "d7278414-6040-46e3-ae8e-98e75e0b73ad" (UID: "d7278414-6040-46e3-ae8e-98e75e0b73ad"). InnerVolumeSpecName "kube-api-access-qnhvl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.607208 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3211e9ba-eb08-403f-9393-1804d73a18c5-kube-api-access-7xcjg" (OuterVolumeSpecName: "kube-api-access-7xcjg") pod "3211e9ba-eb08-403f-9393-1804d73a18c5" (UID: "3211e9ba-eb08-403f-9393-1804d73a18c5"). InnerVolumeSpecName "kube-api-access-7xcjg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.607635 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3211e9ba-eb08-403f-9393-1804d73a18c5-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "3211e9ba-eb08-403f-9393-1804d73a18c5" (UID: "3211e9ba-eb08-403f-9393-1804d73a18c5"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708309 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708370 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/3211e9ba-eb08-403f-9393-1804d73a18c5-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708386 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xcjg\" (UniqueName: \"kubernetes.io/projected/3211e9ba-eb08-403f-9393-1804d73a18c5-kube-api-access-7xcjg\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708402 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708413 4906 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/d7278414-6040-46e3-ae8e-98e75e0b73ad-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708426 4906 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/3211e9ba-eb08-403f-9393-1804d73a18c5-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708439 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/d7278414-6040-46e3-ae8e-98e75e0b73ad-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708454 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3211e9ba-eb08-403f-9393-1804d73a18c5-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708466 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qnhvl\" (UniqueName: \"kubernetes.io/projected/d7278414-6040-46e3-ae8e-98e75e0b73ad-kube-api-access-qnhvl\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: I0227 08:50:49.708480 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d7278414-6040-46e3-ae8e-98e75e0b73ad-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.890477 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Feb 27 08:50:49 crc kubenswrapper[4906]: E0227 08:50:49.890744 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5c5h5b6h5f4h66dh678h5dh6bhc9h589h66ch55h595h6fhc6h65dh97h666h676h559hd9h5d7h685h688h59bh5bch67dh68fh658h9fh567h597h66fq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ldhgk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(30498343-6254-49c2-8220-9df92217cb8f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.313301 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84bbf4f557-nfg4z" event={"ID":"d7278414-6040-46e3-ae8e-98e75e0b73ad","Type":"ContainerDied","Data":"96d64d621d375b3c0dec709d0d12b0fc69b6b2164290610018d4dbc0d449a8db"} Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.313332 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84bbf4f557-nfg4z" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.317712 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6959c47849-57zbw" event={"ID":"3211e9ba-eb08-403f-9393-1804d73a18c5","Type":"ContainerDied","Data":"07b0ac3b52e3f39abe7651f08044e01c1f9ef25658784c4648ef83c595f66a57"} Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.317720 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6959c47849-57zbw" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.322918 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-m55h8" event={"ID":"94735536-25c1-47d8-828b-fc5f71d57552","Type":"ContainerDied","Data":"f7ac5e4ead473802f6c67d23cc49ed395f2a4ded5b9736d3897f0151fff46158"} Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.322971 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-m55h8" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.322976 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7ac5e4ead473802f6c67d23cc49ed395f2a4ded5b9736d3897f0151fff46158" Feb 27 08:50:50 crc kubenswrapper[4906]: E0227 08:50:50.332383 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-d4fhc" podUID="b36228db-b66d-4815-ac1c-e58b85ee3bbf" Feb 27 08:50:50 crc kubenswrapper[4906]: E0227 08:50:50.334437 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7f78987f9b-lzmw8" podUID="6dc6534b-d5ec-4c53-bfc1-aae2389e3755" Feb 27 08:50:50 crc kubenswrapper[4906]: E0227 08:50:50.334735 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.449715 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84bbf4f557-nfg4z"] Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.459190 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-84bbf4f557-nfg4z"] Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.493176 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6959c47849-57zbw"] Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.502083 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6959c47849-57zbw"] Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.510558 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-m55h8"] Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.517806 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-m55h8"] Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.603805 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3211e9ba-eb08-403f-9393-1804d73a18c5" path="/var/lib/kubelet/pods/3211e9ba-eb08-403f-9393-1804d73a18c5/volumes" Feb 27 08:50:50 crc kubenswrapper[4906]: 
I0227 08:50:50.604706 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94735536-25c1-47d8-828b-fc5f71d57552" path="/var/lib/kubelet/pods/94735536-25c1-47d8-828b-fc5f71d57552/volumes" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.605691 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7278414-6040-46e3-ae8e-98e75e0b73ad" path="/var/lib/kubelet/pods/d7278414-6040-46e3-ae8e-98e75e0b73ad/volumes" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.625740 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-q4qfq"] Feb 27 08:50:50 crc kubenswrapper[4906]: E0227 08:50:50.626454 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="init" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.626485 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="init" Feb 27 08:50:50 crc kubenswrapper[4906]: E0227 08:50:50.626500 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="dnsmasq-dns" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.626510 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="dnsmasq-dns" Feb 27 08:50:50 crc kubenswrapper[4906]: E0227 08:50:50.626533 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="94735536-25c1-47d8-828b-fc5f71d57552" containerName="keystone-bootstrap" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.626544 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="94735536-25c1-47d8-828b-fc5f71d57552" containerName="keystone-bootstrap" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.627777 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0cd4824-5d93-46a6-bb94-95a74d8eaeba" containerName="dnsmasq-dns" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.627822 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="94735536-25c1-47d8-828b-fc5f71d57552" containerName="keystone-bootstrap" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.629713 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.632632 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-credential-keys\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.632773 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-scripts\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.632989 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqc6j\" (UniqueName: \"kubernetes.io/projected/aaaf3558-92de-434c-825f-cb5a828a19fe-kube-api-access-sqc6j\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.633084 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-combined-ca-bundle\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.633193 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-fernet-keys\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.633235 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-config-data\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.633439 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.633621 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.636728 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.642461 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x96zn" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.643167 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.658954 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q4qfq"] Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.735270 4906 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-sqc6j\" (UniqueName: \"kubernetes.io/projected/aaaf3558-92de-434c-825f-cb5a828a19fe-kube-api-access-sqc6j\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.735390 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-combined-ca-bundle\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.735467 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-fernet-keys\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.735501 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-config-data\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.735571 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-credential-keys\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.735622 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-scripts\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.742263 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-combined-ca-bundle\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.745633 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-config-data\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.754384 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-scripts\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.754912 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-credential-keys\") pod \"keystone-bootstrap-q4qfq\" (UID: 
\"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.755309 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-fernet-keys\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.758280 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqc6j\" (UniqueName: \"kubernetes.io/projected/aaaf3558-92de-434c-825f-cb5a828a19fe-kube-api-access-sqc6j\") pod \"keystone-bootstrap-q4qfq\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:50 crc kubenswrapper[4906]: I0227 08:50:50.955624 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.201121 4906 scope.go:117] "RemoveContainer" containerID="747541d750590e9a263f47272ccb6729e4ca0a5e89779a9084477c8b6d9e9ec1" Feb 27 08:50:51 crc kubenswrapper[4906]: E0227 08:50:51.216686 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Feb 27 08:50:51 crc kubenswrapper[4906]: E0227 08:50:51.216953 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lf7n6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,
Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-pg9nd_openstack(76d04662-7576-4f57-aca2-e118e5efd771): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 08:50:51 crc kubenswrapper[4906]: E0227 08:50:51.218754 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-pg9nd" podUID="76d04662-7576-4f57-aca2-e118e5efd771" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.289581 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.356339 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-74959df6fc-ql52b" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.356956 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-74959df6fc-ql52b" event={"ID":"10267773-6b24-4b86-9190-a9792b4fa7a6","Type":"ContainerDied","Data":"3ed515c7a0bb18f9eb95b9e0b5536422494c07ffa41230c560e626e22bd3a961"} Feb 27 08:50:51 crc kubenswrapper[4906]: E0227 08:50:51.372433 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-pg9nd" podUID="76d04662-7576-4f57-aca2-e118e5efd771" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.450072 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-scripts\") pod \"10267773-6b24-4b86-9190-a9792b4fa7a6\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.450165 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/10267773-6b24-4b86-9190-a9792b4fa7a6-horizon-secret-key\") pod \"10267773-6b24-4b86-9190-a9792b4fa7a6\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.450319 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58tlf\" (UniqueName: \"kubernetes.io/projected/10267773-6b24-4b86-9190-a9792b4fa7a6-kube-api-access-58tlf\") pod \"10267773-6b24-4b86-9190-a9792b4fa7a6\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.450359 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10267773-6b24-4b86-9190-a9792b4fa7a6-logs\") pod \"10267773-6b24-4b86-9190-a9792b4fa7a6\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.450519 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-config-data\") pod \"10267773-6b24-4b86-9190-a9792b4fa7a6\" (UID: \"10267773-6b24-4b86-9190-a9792b4fa7a6\") " Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.451816 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-scripts" (OuterVolumeSpecName: "scripts") pod "10267773-6b24-4b86-9190-a9792b4fa7a6" (UID: "10267773-6b24-4b86-9190-a9792b4fa7a6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.452788 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-config-data" (OuterVolumeSpecName: "config-data") pod "10267773-6b24-4b86-9190-a9792b4fa7a6" (UID: "10267773-6b24-4b86-9190-a9792b4fa7a6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.453317 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10267773-6b24-4b86-9190-a9792b4fa7a6-logs" (OuterVolumeSpecName: "logs") pod "10267773-6b24-4b86-9190-a9792b4fa7a6" (UID: "10267773-6b24-4b86-9190-a9792b4fa7a6"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.457520 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.457845 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/10267773-6b24-4b86-9190-a9792b4fa7a6-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "10267773-6b24-4b86-9190-a9792b4fa7a6" (UID: "10267773-6b24-4b86-9190-a9792b4fa7a6"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.457870 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/10267773-6b24-4b86-9190-a9792b4fa7a6-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.457949 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/10267773-6b24-4b86-9190-a9792b4fa7a6-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.458582 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10267773-6b24-4b86-9190-a9792b4fa7a6-kube-api-access-58tlf" (OuterVolumeSpecName: "kube-api-access-58tlf") pod "10267773-6b24-4b86-9190-a9792b4fa7a6" (UID: "10267773-6b24-4b86-9190-a9792b4fa7a6"). InnerVolumeSpecName "kube-api-access-58tlf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.560419 4906 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/10267773-6b24-4b86-9190-a9792b4fa7a6-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.560815 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58tlf\" (UniqueName: \"kubernetes.io/projected/10267773-6b24-4b86-9190-a9792b4fa7a6-kube-api-access-58tlf\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.732175 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-q4qfq"] Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.741364 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-74959df6fc-ql52b"] Feb 27 08:50:51 crc kubenswrapper[4906]: I0227 08:50:51.751727 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-74959df6fc-ql52b"] Feb 27 08:50:51 crc kubenswrapper[4906]: W0227 08:50:51.911842 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaaaf3558_92de_434c_825f_cb5a828a19fe.slice/crio-a5f13e469693be3de78161c7a7b0d7e97d702bfa3d4a14c7d02a065d8920e531 WatchSource:0}: Error finding container a5f13e469693be3de78161c7a7b0d7e97d702bfa3d4a14c7d02a065d8920e531: Status 404 returned error can't find the container with id a5f13e469693be3de78161c7a7b0d7e97d702bfa3d4a14c7d02a065d8920e531 Feb 27 08:50:52 crc kubenswrapper[4906]: I0227 08:50:52.366531 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qfq" event={"ID":"aaaf3558-92de-434c-825f-cb5a828a19fe","Type":"ContainerStarted","Data":"cfc93b65268e8c5f4dc0495ac26f3b5aac10ecf8e070fdee100aeb5197266e83"} Feb 27 08:50:52 crc kubenswrapper[4906]: I0227 08:50:52.367078 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qfq" event={"ID":"aaaf3558-92de-434c-825f-cb5a828a19fe","Type":"ContainerStarted","Data":"a5f13e469693be3de78161c7a7b0d7e97d702bfa3d4a14c7d02a065d8920e531"} Feb 27 08:50:52 crc kubenswrapper[4906]: I0227 08:50:52.368657 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30498343-6254-49c2-8220-9df92217cb8f","Type":"ContainerStarted","Data":"b6d1f7754d20d1bbedc3a306957ed20f39c9658c5e93b3850ede31e6b6d7a451"} Feb 27 08:50:52 crc kubenswrapper[4906]: I0227 08:50:52.370641 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-sgch9" event={"ID":"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa","Type":"ContainerStarted","Data":"72df2c35f2b9841c7cebfdf6f3a99c7d0b8409815b6af3a1c36acb589987223f"} Feb 27 08:50:52 crc kubenswrapper[4906]: I0227 08:50:52.406156 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-q4qfq" podStartSLOduration=2.406122184 podStartE2EDuration="2.406122184s" podCreationTimestamp="2026-02-27 08:50:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:50:52.388718307 +0000 UTC m=+1350.783119917" watchObservedRunningTime="2026-02-27 08:50:52.406122184 +0000 UTC m=+1350.800523794" Feb 27 08:50:52 crc kubenswrapper[4906]: I0227 08:50:52.418733 4906 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/glance-db-sync-sgch9" podStartSLOduration=2.817926558 podStartE2EDuration="1m10.418700504s" podCreationTimestamp="2026-02-27 08:49:42 +0000 UTC" firstStartedPulling="2026-02-27 08:49:43.638243935 +0000 UTC m=+1282.032645545" lastFinishedPulling="2026-02-27 08:50:51.239017881 +0000 UTC m=+1349.633419491" observedRunningTime="2026-02-27 08:50:52.408916587 +0000 UTC m=+1350.803318197" watchObservedRunningTime="2026-02-27 08:50:52.418700504 +0000 UTC m=+1350.813102114" Feb 27 08:50:52 crc kubenswrapper[4906]: I0227 08:50:52.563112 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10267773-6b24-4b86-9190-a9792b4fa7a6" path="/var/lib/kubelet/pods/10267773-6b24-4b86-9190-a9792b4fa7a6/volumes" Feb 27 08:50:54 crc kubenswrapper[4906]: I0227 08:50:54.845823 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:50:54 crc kubenswrapper[4906]: I0227 08:50:54.846659 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:50:54 crc kubenswrapper[4906]: I0227 08:50:54.846803 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:50:54 crc kubenswrapper[4906]: I0227 08:50:54.848652 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5fcba02c6ff9fc89671410ba4e06ef0b888d1413d1b9d9a87dec063811640cb4"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:50:54 crc kubenswrapper[4906]: I0227 08:50:54.848726 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://5fcba02c6ff9fc89671410ba4e06ef0b888d1413d1b9d9a87dec063811640cb4" gracePeriod=600 Feb 27 08:50:55 crc kubenswrapper[4906]: I0227 08:50:55.401759 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="5fcba02c6ff9fc89671410ba4e06ef0b888d1413d1b9d9a87dec063811640cb4" exitCode=0 Feb 27 08:50:55 crc kubenswrapper[4906]: I0227 08:50:55.401854 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"5fcba02c6ff9fc89671410ba4e06ef0b888d1413d1b9d9a87dec063811640cb4"} Feb 27 08:50:55 crc kubenswrapper[4906]: I0227 08:50:55.401932 4906 scope.go:117] "RemoveContainer" containerID="bb55c20fecc20d21bdfb369cf4ea10a10466e88d9a7657f3958a0393ee619f89" Feb 27 08:50:55 crc kubenswrapper[4906]: I0227 08:50:55.405027 4906 generic.go:334] "Generic (PLEG): container finished" podID="aaaf3558-92de-434c-825f-cb5a828a19fe" containerID="cfc93b65268e8c5f4dc0495ac26f3b5aac10ecf8e070fdee100aeb5197266e83" exitCode=0 
Feb 27 08:50:55 crc kubenswrapper[4906]: I0227 08:50:55.405061 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qfq" event={"ID":"aaaf3558-92de-434c-825f-cb5a828a19fe","Type":"ContainerDied","Data":"cfc93b65268e8c5f4dc0495ac26f3b5aac10ecf8e070fdee100aeb5197266e83"} Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.177344 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.190075 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-fernet-keys\") pod \"aaaf3558-92de-434c-825f-cb5a828a19fe\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.190194 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-credential-keys\") pod \"aaaf3558-92de-434c-825f-cb5a828a19fe\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.190215 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sqc6j\" (UniqueName: \"kubernetes.io/projected/aaaf3558-92de-434c-825f-cb5a828a19fe-kube-api-access-sqc6j\") pod \"aaaf3558-92de-434c-825f-cb5a828a19fe\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.190272 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-combined-ca-bundle\") pod \"aaaf3558-92de-434c-825f-cb5a828a19fe\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.190321 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-config-data\") pod \"aaaf3558-92de-434c-825f-cb5a828a19fe\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.190427 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-scripts\") pod \"aaaf3558-92de-434c-825f-cb5a828a19fe\" (UID: \"aaaf3558-92de-434c-825f-cb5a828a19fe\") " Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.204964 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aaaf3558-92de-434c-825f-cb5a828a19fe-kube-api-access-sqc6j" (OuterVolumeSpecName: "kube-api-access-sqc6j") pod "aaaf3558-92de-434c-825f-cb5a828a19fe" (UID: "aaaf3558-92de-434c-825f-cb5a828a19fe"). InnerVolumeSpecName "kube-api-access-sqc6j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.206189 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "aaaf3558-92de-434c-825f-cb5a828a19fe" (UID: "aaaf3558-92de-434c-825f-cb5a828a19fe"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.206188 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-scripts" (OuterVolumeSpecName: "scripts") pod "aaaf3558-92de-434c-825f-cb5a828a19fe" (UID: "aaaf3558-92de-434c-825f-cb5a828a19fe"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.208324 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "aaaf3558-92de-434c-825f-cb5a828a19fe" (UID: "aaaf3558-92de-434c-825f-cb5a828a19fe"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.258498 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aaaf3558-92de-434c-825f-cb5a828a19fe" (UID: "aaaf3558-92de-434c-825f-cb5a828a19fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.259058 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-config-data" (OuterVolumeSpecName: "config-data") pod "aaaf3558-92de-434c-825f-cb5a828a19fe" (UID: "aaaf3558-92de-434c-825f-cb5a828a19fe"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.293098 4906 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.293577 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sqc6j\" (UniqueName: \"kubernetes.io/projected/aaaf3558-92de-434c-825f-cb5a828a19fe-kube-api-access-sqc6j\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.293591 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.293600 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.293610 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.293619 4906 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/aaaf3558-92de-434c-825f-cb5a828a19fe-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.488595 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" 
event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"400ed111d9890552c3d11872a2da1327403afcb99497b3740757d1aed8ae0dbb"} Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.490790 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-q4qfq" event={"ID":"aaaf3558-92de-434c-825f-cb5a828a19fe","Type":"ContainerDied","Data":"a5f13e469693be3de78161c7a7b0d7e97d702bfa3d4a14c7d02a065d8920e531"} Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.490856 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5f13e469693be3de78161c7a7b0d7e97d702bfa3d4a14c7d02a065d8920e531" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.490982 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-q4qfq" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.494431 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnnc" event={"ID":"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7","Type":"ContainerStarted","Data":"074aab155c138dea42273142d3dd99e2b677ea08a3edf628dad71e9e6b3a0b15"} Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.497157 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30498343-6254-49c2-8220-9df92217cb8f","Type":"ContainerStarted","Data":"c008eba5964801287e746052e23aec6b88482120d86e46da7b4d9250c8f87062"} Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.552371 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6d6854c956-6hqvk"] Feb 27 08:50:57 crc kubenswrapper[4906]: E0227 08:50:57.552864 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aaaf3558-92de-434c-825f-cb5a828a19fe" containerName="keystone-bootstrap" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.552898 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="aaaf3558-92de-434c-825f-cb5a828a19fe" containerName="keystone-bootstrap" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.553094 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="aaaf3558-92de-434c-825f-cb5a828a19fe" containerName="keystone-bootstrap" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.553740 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.557168 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.557467 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.557698 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.557921 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-x96zn" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.561014 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.561131 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.564646 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d6854c956-6hqvk"] Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.568074 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-jjnnc" podStartSLOduration=2.773439655 podStartE2EDuration="44.568049444s" podCreationTimestamp="2026-02-27 08:50:13 +0000 UTC" firstStartedPulling="2026-02-27 08:50:15.174549035 +0000 UTC m=+1313.568950645" lastFinishedPulling="2026-02-27 08:50:56.969158824 +0000 UTC m=+1355.363560434" observedRunningTime="2026-02-27 08:50:57.550622787 +0000 UTC m=+1355.945024407" watchObservedRunningTime="2026-02-27 08:50:57.568049444 +0000 UTC m=+1355.962451054" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601077 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96c7t\" (UniqueName: \"kubernetes.io/projected/6ca291d5-b655-4e18-8bc7-738194a54582-kube-api-access-96c7t\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601156 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-internal-tls-certs\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601194 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-combined-ca-bundle\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601209 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-config-data\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601295 4906 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-fernet-keys\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601362 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-credential-keys\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601386 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-scripts\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.601428 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-public-tls-certs\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703250 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-credential-keys\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703322 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-scripts\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703350 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-public-tls-certs\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703472 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96c7t\" (UniqueName: \"kubernetes.io/projected/6ca291d5-b655-4e18-8bc7-738194a54582-kube-api-access-96c7t\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703552 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-internal-tls-certs\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703580 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-combined-ca-bundle\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703600 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-config-data\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.703685 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-fernet-keys\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.711060 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-config-data\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.711254 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-credential-keys\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.712021 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-fernet-keys\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.713906 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-internal-tls-certs\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.723821 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-public-tls-certs\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.733172 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-scripts\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.733723 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6ca291d5-b655-4e18-8bc7-738194a54582-combined-ca-bundle\") pod \"keystone-6d6854c956-6hqvk\" 
(UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.745579 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96c7t\" (UniqueName: \"kubernetes.io/projected/6ca291d5-b655-4e18-8bc7-738194a54582-kube-api-access-96c7t\") pod \"keystone-6d6854c956-6hqvk\" (UID: \"6ca291d5-b655-4e18-8bc7-738194a54582\") " pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:57 crc kubenswrapper[4906]: I0227 08:50:57.878303 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:58 crc kubenswrapper[4906]: I0227 08:50:58.507923 4906 generic.go:334] "Generic (PLEG): container finished" podID="128b6d95-fa07-4ab9-a927-47882c406fa3" containerID="ff74f8c1c239517610ae82fe9c54e8f2cdc13655273903ddb470f0835bec89f0" exitCode=0 Feb 27 08:50:58 crc kubenswrapper[4906]: I0227 08:50:58.509351 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-67z26" event={"ID":"128b6d95-fa07-4ab9-a927-47882c406fa3","Type":"ContainerDied","Data":"ff74f8c1c239517610ae82fe9c54e8f2cdc13655273903ddb470f0835bec89f0"} Feb 27 08:50:58 crc kubenswrapper[4906]: I0227 08:50:58.518166 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6d6854c956-6hqvk"] Feb 27 08:50:59 crc kubenswrapper[4906]: I0227 08:50:59.527494 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d6854c956-6hqvk" event={"ID":"6ca291d5-b655-4e18-8bc7-738194a54582","Type":"ContainerStarted","Data":"19990ff826e9450e1a70fc2b1bad1f1efc4b5ba44d52d910a0343c968c7be6ce"} Feb 27 08:50:59 crc kubenswrapper[4906]: I0227 08:50:59.528414 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:50:59 crc kubenswrapper[4906]: I0227 08:50:59.528434 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6d6854c956-6hqvk" event={"ID":"6ca291d5-b655-4e18-8bc7-738194a54582","Type":"ContainerStarted","Data":"e33f9dc2d85a0413c105493f489f48adbc8ac482938c71d7e8e2a23ec728925f"} Feb 27 08:50:59 crc kubenswrapper[4906]: I0227 08:50:59.566812 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-6d6854c956-6hqvk" podStartSLOduration=2.5667839409999997 podStartE2EDuration="2.566783941s" podCreationTimestamp="2026-02-27 08:50:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:50:59.553568564 +0000 UTC m=+1357.947970174" watchObservedRunningTime="2026-02-27 08:50:59.566783941 +0000 UTC m=+1357.961185561" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.008828 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-67z26" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.088703 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-config\") pod \"128b6d95-fa07-4ab9-a927-47882c406fa3\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.089017 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle\") pod \"128b6d95-fa07-4ab9-a927-47882c406fa3\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.089209 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xznn9\" (UniqueName: \"kubernetes.io/projected/128b6d95-fa07-4ab9-a927-47882c406fa3-kube-api-access-xznn9\") pod \"128b6d95-fa07-4ab9-a927-47882c406fa3\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.098547 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/128b6d95-fa07-4ab9-a927-47882c406fa3-kube-api-access-xznn9" (OuterVolumeSpecName: "kube-api-access-xznn9") pod "128b6d95-fa07-4ab9-a927-47882c406fa3" (UID: "128b6d95-fa07-4ab9-a927-47882c406fa3"). InnerVolumeSpecName "kube-api-access-xznn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:00 crc kubenswrapper[4906]: E0227 08:51:00.180101 4906 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle podName:128b6d95-fa07-4ab9-a927-47882c406fa3 nodeName:}" failed. No retries permitted until 2026-02-27 08:51:00.680031027 +0000 UTC m=+1359.074432637 (durationBeforeRetry 500ms). Error: error cleaning subPath mounts for volume "combined-ca-bundle" (UniqueName: "kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle") pod "128b6d95-fa07-4ab9-a927-47882c406fa3" (UID: "128b6d95-fa07-4ab9-a927-47882c406fa3") : error deleting /var/lib/kubelet/pods/128b6d95-fa07-4ab9-a927-47882c406fa3/volume-subpaths: remove /var/lib/kubelet/pods/128b6d95-fa07-4ab9-a927-47882c406fa3/volume-subpaths: no such file or directory Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.187772 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-config" (OuterVolumeSpecName: "config") pod "128b6d95-fa07-4ab9-a927-47882c406fa3" (UID: "128b6d95-fa07-4ab9-a927-47882c406fa3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.191094 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xznn9\" (UniqueName: \"kubernetes.io/projected/128b6d95-fa07-4ab9-a927-47882c406fa3-kube-api-access-xznn9\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.191144 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.538847 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-67z26" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.538847 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-67z26" event={"ID":"128b6d95-fa07-4ab9-a927-47882c406fa3","Type":"ContainerDied","Data":"0f60cfa9b7bdaa8e45bdaa9ea57a67691297bf9b4fd9de2da695e56363369eb8"} Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.539444 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f60cfa9b7bdaa8e45bdaa9ea57a67691297bf9b4fd9de2da695e56363369eb8" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.699732 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle\") pod \"128b6d95-fa07-4ab9-a927-47882c406fa3\" (UID: \"128b6d95-fa07-4ab9-a927-47882c406fa3\") " Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.704415 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "128b6d95-fa07-4ab9-a927-47882c406fa3" (UID: "128b6d95-fa07-4ab9-a927-47882c406fa3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.750958 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54b684dc7c-bw6mb"] Feb 27 08:51:00 crc kubenswrapper[4906]: E0227 08:51:00.751388 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="128b6d95-fa07-4ab9-a927-47882c406fa3" containerName="neutron-db-sync" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.751408 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="128b6d95-fa07-4ab9-a927-47882c406fa3" containerName="neutron-db-sync" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.751553 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="128b6d95-fa07-4ab9-a927-47882c406fa3" containerName="neutron-db-sync" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.752509 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.806469 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm97c\" (UniqueName: \"kubernetes.io/projected/95b14dbd-64e2-49e4-94b2-90b8dbd96242-kube-api-access-fm97c\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.806633 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-dns-svc\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.806667 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-nb\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.806718 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-config\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.806751 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-sb\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.806929 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/128b6d95-fa07-4ab9-a927-47882c406fa3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.884027 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54b684dc7c-bw6mb"] Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.911283 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-config\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.911356 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-sb\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.911452 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm97c\" (UniqueName: 
\"kubernetes.io/projected/95b14dbd-64e2-49e4-94b2-90b8dbd96242-kube-api-access-fm97c\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.911521 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-dns-svc\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.911537 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-nb\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.914281 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-nb\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.915902 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-sb\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.918292 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-config\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.919444 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-dns-svc\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.942902 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5f95d46d88-tlzqg"] Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.944683 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.950531 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-48hf2" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.955035 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.960058 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm97c\" (UniqueName: \"kubernetes.io/projected/95b14dbd-64e2-49e4-94b2-90b8dbd96242-kube-api-access-fm97c\") pod \"dnsmasq-dns-54b684dc7c-bw6mb\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.960771 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.962368 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Feb 27 08:51:00 crc kubenswrapper[4906]: I0227 08:51:00.993518 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f95d46d88-tlzqg"] Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.012690 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-ovndb-tls-certs\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.012765 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqwlp\" (UniqueName: \"kubernetes.io/projected/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-kube-api-access-kqwlp\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.012795 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-httpd-config\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.012821 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-combined-ca-bundle\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.012854 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-config\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.117014 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-ovndb-tls-certs\") pod 
\"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.117096 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqwlp\" (UniqueName: \"kubernetes.io/projected/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-kube-api-access-kqwlp\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.117131 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-httpd-config\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.117166 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-combined-ca-bundle\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.117205 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-config\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.124022 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-ovndb-tls-certs\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.127619 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-config\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.128346 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.128744 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-combined-ca-bundle\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.128910 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-httpd-config\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.148083 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqwlp\" (UniqueName: \"kubernetes.io/projected/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-kube-api-access-kqwlp\") pod \"neutron-5f95d46d88-tlzqg\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.353250 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:01 crc kubenswrapper[4906]: I0227 08:51:01.711408 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54b684dc7c-bw6mb"] Feb 27 08:51:01 crc kubenswrapper[4906]: W0227 08:51:01.714295 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod95b14dbd_64e2_49e4_94b2_90b8dbd96242.slice/crio-11f3fd959815705e646d820b7a868c803e8e42580ecf442453f7c8a0f09e7b10 WatchSource:0}: Error finding container 11f3fd959815705e646d820b7a868c803e8e42580ecf442453f7c8a0f09e7b10: Status 404 returned error can't find the container with id 11f3fd959815705e646d820b7a868c803e8e42580ecf442453f7c8a0f09e7b10 Feb 27 08:51:02 crc kubenswrapper[4906]: I0227 08:51:02.382140 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5f95d46d88-tlzqg"] Feb 27 08:51:02 crc kubenswrapper[4906]: W0227 08:51:02.386464 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3de3e5d7_e4c1_4616_bf5a_fb0e0926bce4.slice/crio-d890e754f86bef0288dc98e4fe5510ca1ecb2f6843657fc7b00599d15baeba68 WatchSource:0}: Error finding container d890e754f86bef0288dc98e4fe5510ca1ecb2f6843657fc7b00599d15baeba68: Status 404 returned error can't find the container with id d890e754f86bef0288dc98e4fe5510ca1ecb2f6843657fc7b00599d15baeba68 Feb 27 08:51:02 crc kubenswrapper[4906]: I0227 08:51:02.576479 4906 generic.go:334] "Generic (PLEG): container finished" podID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerID="cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed" exitCode=0 Feb 27 08:51:02 crc kubenswrapper[4906]: I0227 08:51:02.576603 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" event={"ID":"95b14dbd-64e2-49e4-94b2-90b8dbd96242","Type":"ContainerDied","Data":"cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed"} Feb 27 08:51:02 crc kubenswrapper[4906]: I0227 08:51:02.577100 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" 
event={"ID":"95b14dbd-64e2-49e4-94b2-90b8dbd96242","Type":"ContainerStarted","Data":"11f3fd959815705e646d820b7a868c803e8e42580ecf442453f7c8a0f09e7b10"} Feb 27 08:51:02 crc kubenswrapper[4906]: I0227 08:51:02.581361 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f95d46d88-tlzqg" event={"ID":"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4","Type":"ContainerStarted","Data":"d890e754f86bef0288dc98e4fe5510ca1ecb2f6843657fc7b00599d15baeba68"} Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.272963 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-796c549d8f-qpw49"] Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.275448 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.277739 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.278146 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.291225 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-796c549d8f-qpw49"] Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.309220 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-ovndb-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.309355 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-internal-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.309382 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-public-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.309418 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-httpd-config\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.309434 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-config\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.309453 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgf9p\" (UniqueName: 
\"kubernetes.io/projected/1774e857-2c80-489f-8985-11398d1727be-kube-api-access-kgf9p\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.309487 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-combined-ca-bundle\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.410967 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-internal-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.411031 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-public-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.411089 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-config\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.411113 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-httpd-config\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.411140 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgf9p\" (UniqueName: \"kubernetes.io/projected/1774e857-2c80-489f-8985-11398d1727be-kube-api-access-kgf9p\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.411187 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-combined-ca-bundle\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.411246 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-ovndb-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.419105 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-httpd-config\") pod 
\"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.419105 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-ovndb-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.419236 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-config\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.419739 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-public-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.420081 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-combined-ca-bundle\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.434095 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1774e857-2c80-489f-8985-11398d1727be-internal-tls-certs\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.441444 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgf9p\" (UniqueName: \"kubernetes.io/projected/1774e857-2c80-489f-8985-11398d1727be-kube-api-access-kgf9p\") pod \"neutron-796c549d8f-qpw49\" (UID: \"1774e857-2c80-489f-8985-11398d1727be\") " pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.592849 4906 generic.go:334] "Generic (PLEG): container finished" podID="3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" containerID="074aab155c138dea42273142d3dd99e2b677ea08a3edf628dad71e9e6b3a0b15" exitCode=0 Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.592942 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnnc" event={"ID":"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7","Type":"ContainerDied","Data":"074aab155c138dea42273142d3dd99e2b677ea08a3edf628dad71e9e6b3a0b15"} Feb 27 08:51:03 crc kubenswrapper[4906]: I0227 08:51:03.594796 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:04 crc kubenswrapper[4906]: I0227 08:51:04.198483 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-796c549d8f-qpw49"] Feb 27 08:51:04 crc kubenswrapper[4906]: I0227 08:51:04.605216 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-796c549d8f-qpw49" event={"ID":"1774e857-2c80-489f-8985-11398d1727be","Type":"ContainerStarted","Data":"094e0a77f8028953f2557703c260eb91224c4c4b6b232d112d9bfb426a0ac1ad"} Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.040102 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-jjnnc" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.160236 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-scripts\") pod \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.160375 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s99hf\" (UniqueName: \"kubernetes.io/projected/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-kube-api-access-s99hf\") pod \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.160418 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-combined-ca-bundle\") pod \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.160477 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-config-data\") pod \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.160496 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-logs\") pod \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\" (UID: \"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7\") " Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.161067 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-logs" (OuterVolumeSpecName: "logs") pod "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" (UID: "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.172127 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-scripts" (OuterVolumeSpecName: "scripts") pod "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" (UID: "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.173843 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-kube-api-access-s99hf" (OuterVolumeSpecName: "kube-api-access-s99hf") pod "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" (UID: "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7"). InnerVolumeSpecName "kube-api-access-s99hf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.197129 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" (UID: "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.219425 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-config-data" (OuterVolumeSpecName: "config-data") pod "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" (UID: "3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.262747 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.262791 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.262804 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.262821 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s99hf\" (UniqueName: \"kubernetes.io/projected/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-kube-api-access-s99hf\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.262838 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.617005 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-jjnnc" event={"ID":"3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7","Type":"ContainerDied","Data":"3e3824cce68035fcd76a3166b79806f0ac6797bcc008e853d1b255218734cf47"} Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.617076 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e3824cce68035fcd76a3166b79806f0ac6797bcc008e853d1b255218734cf47" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.617083 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-jjnnc" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.811428 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-59d7bdb8d4-m9nsb"] Feb 27 08:51:05 crc kubenswrapper[4906]: E0227 08:51:05.811834 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" containerName="placement-db-sync" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.811854 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" containerName="placement-db-sync" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.812096 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" containerName="placement-db-sync" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.843224 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59d7bdb8d4-m9nsb"] Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.843385 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.851281 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.851645 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.852058 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.852355 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.852551 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-78pgm" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.879120 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-public-tls-certs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.879198 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9tvd\" (UniqueName: \"kubernetes.io/projected/822cf36a-2c4f-4f54-a927-0adbc66b230f-kube-api-access-h9tvd\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.879251 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-scripts\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.879272 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-internal-tls-certs\") pod \"placement-59d7bdb8d4-m9nsb\" 
(UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.879305 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-combined-ca-bundle\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.879325 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/822cf36a-2c4f-4f54-a927-0adbc66b230f-logs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.879370 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-config-data\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.982319 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-public-tls-certs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.982437 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9tvd\" (UniqueName: \"kubernetes.io/projected/822cf36a-2c4f-4f54-a927-0adbc66b230f-kube-api-access-h9tvd\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.982534 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-scripts\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.982619 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-internal-tls-certs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.982680 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-combined-ca-bundle\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.982750 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/822cf36a-2c4f-4f54-a927-0adbc66b230f-logs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " 
pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.982900 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-config-data\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.984468 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/822cf36a-2c4f-4f54-a927-0adbc66b230f-logs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.988360 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-internal-tls-certs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.988850 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-public-tls-certs\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.990282 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-scripts\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.991634 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-config-data\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:05 crc kubenswrapper[4906]: I0227 08:51:05.994917 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/822cf36a-2c4f-4f54-a927-0adbc66b230f-combined-ca-bundle\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:06 crc kubenswrapper[4906]: I0227 08:51:06.005296 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9tvd\" (UniqueName: \"kubernetes.io/projected/822cf36a-2c4f-4f54-a927-0adbc66b230f-kube-api-access-h9tvd\") pod \"placement-59d7bdb8d4-m9nsb\" (UID: \"822cf36a-2c4f-4f54-a927-0adbc66b230f\") " pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:06 crc kubenswrapper[4906]: I0227 08:51:06.185231 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:06 crc kubenswrapper[4906]: I0227 08:51:06.659200 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-59d7bdb8d4-m9nsb"] Feb 27 08:51:07 crc kubenswrapper[4906]: I0227 08:51:07.642268 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59d7bdb8d4-m9nsb" event={"ID":"822cf36a-2c4f-4f54-a927-0adbc66b230f","Type":"ContainerStarted","Data":"68aac0043833fd0d4faf353c6a0470a1a08518614dfcae445fe633f5ea838c5b"} Feb 27 08:51:07 crc kubenswrapper[4906]: I0227 08:51:07.643173 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59d7bdb8d4-m9nsb" event={"ID":"822cf36a-2c4f-4f54-a927-0adbc66b230f","Type":"ContainerStarted","Data":"19cd37131ace01098b927c9bfe04b5a0f6968bf24054850ff2efd84eea893183"} Feb 27 08:51:07 crc kubenswrapper[4906]: I0227 08:51:07.646620 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-796c549d8f-qpw49" event={"ID":"1774e857-2c80-489f-8985-11398d1727be","Type":"ContainerStarted","Data":"32717b00c7f41b593ca3f21e7e538c671ed4c36a6083b4ce930b90eb02d37100"} Feb 27 08:51:07 crc kubenswrapper[4906]: I0227 08:51:07.649223 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f95d46d88-tlzqg" event={"ID":"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4","Type":"ContainerStarted","Data":"d9811e0f5aaa9de6a809c26ad72bccf19cb90003607d3ab9efb4105c167a5309"} Feb 27 08:51:07 crc kubenswrapper[4906]: I0227 08:51:07.653535 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" event={"ID":"95b14dbd-64e2-49e4-94b2-90b8dbd96242","Type":"ContainerStarted","Data":"680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f"} Feb 27 08:51:07 crc kubenswrapper[4906]: I0227 08:51:07.653937 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:07 crc kubenswrapper[4906]: I0227 08:51:07.683443 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" podStartSLOduration=7.683400933 podStartE2EDuration="7.683400933s" podCreationTimestamp="2026-02-27 08:51:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:07.676632926 +0000 UTC m=+1366.071034566" watchObservedRunningTime="2026-02-27 08:51:07.683400933 +0000 UTC m=+1366.077802543" Feb 27 08:51:10 crc kubenswrapper[4906]: I0227 08:51:10.695264 4906 generic.go:334] "Generic (PLEG): container finished" podID="e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" containerID="72df2c35f2b9841c7cebfdf6f3a99c7d0b8409815b6af3a1c36acb589987223f" exitCode=0 Feb 27 08:51:10 crc kubenswrapper[4906]: I0227 08:51:10.695852 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-sgch9" event={"ID":"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa","Type":"ContainerDied","Data":"72df2c35f2b9841c7cebfdf6f3a99c7d0b8409815b6af3a1c36acb589987223f"} Feb 27 08:51:13 crc kubenswrapper[4906]: E0227 08:51:13.652478 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[etc-swift], unattached volumes=[], failed to process volumes=[]: context deadline exceeded" pod="openstack/swift-storage-0" podUID="c98486bd-1325-4072-bce0-a28d38ecead2" Feb 27 08:51:13 crc kubenswrapper[4906]: I0227 08:51:13.741579 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Feb 27 08:51:13 crc kubenswrapper[4906]: E0227 08:51:13.823872 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/ubi9/httpd-24:latest" Feb 27 08:51:13 crc kubenswrapper[4906]: E0227 08:51:13.824144 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:proxy-httpd,Image:registry.redhat.io/ubi9/httpd-24:latest,Command:[/usr/sbin/httpd],Args:[-DFOREGROUND],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:proxy-httpd,HostPort:0,ContainerPort:3000,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf/httpd.conf,SubPath:httpd.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/httpd/conf.d/ssl.conf,SubPath:ssl.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run-httpd,ReadOnly:false,MountPath:/run/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log-httpd,ReadOnly:false,MountPath:/var/log/httpd,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ldhgk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/,Port:{0 3000 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:10,TimeoutSeconds:30,PeriodSeconds:30,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(30498343-6254-49c2-8220-9df92217cb8f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 27 08:51:13 crc kubenswrapper[4906]: E0227 08:51:13.826998 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"proxy-httpd\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"]" pod="openstack/ceilometer-0" podUID="30498343-6254-49c2-8220-9df92217cb8f" Feb 27 08:51:13 crc kubenswrapper[4906]: I0227 08:51:13.963351 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-sgch9" Feb 27 08:51:13 crc kubenswrapper[4906]: I0227 08:51:13.997688 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-combined-ca-bundle\") pod \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " Feb 27 08:51:13 crc kubenswrapper[4906]: I0227 08:51:13.997755 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kk88q\" (UniqueName: \"kubernetes.io/projected/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-kube-api-access-kk88q\") pod \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " Feb 27 08:51:13 crc kubenswrapper[4906]: I0227 08:51:13.997812 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-config-data\") pod \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " Feb 27 08:51:13 crc kubenswrapper[4906]: I0227 08:51:13.998028 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-db-sync-config-data\") pod \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\" (UID: \"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa\") " Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.004289 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" (UID: "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.015213 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-kube-api-access-kk88q" (OuterVolumeSpecName: "kube-api-access-kk88q") pod "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" (UID: "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa"). InnerVolumeSpecName "kube-api-access-kk88q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.053831 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" (UID: "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.078345 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-config-data" (OuterVolumeSpecName: "config-data") pod "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" (UID: "e2e1d22f-d9ac-467f-81f9-e4a4282f46aa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.100792 4906 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.100823 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.100837 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kk88q\" (UniqueName: \"kubernetes.io/projected/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-kube-api-access-kk88q\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.100850 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.753315 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f95d46d88-tlzqg" event={"ID":"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4","Type":"ContainerStarted","Data":"86dd6449fddc39957e004f7b6b397c4d4621611eda44f64973038e9868bc3fb9"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.753671 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.762210 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-59d7bdb8d4-m9nsb" event={"ID":"822cf36a-2c4f-4f54-a927-0adbc66b230f","Type":"ContainerStarted","Data":"89a3775f9e574ffb179b6672205bd48b0ad0601fe0b4e875c5f4590526f98712"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.762510 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.774998 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-sgch9" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.775009 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-sgch9" event={"ID":"e2e1d22f-d9ac-467f-81f9-e4a4282f46aa","Type":"ContainerDied","Data":"6913d318e7ba92353912b3906f9d06112d1a769806ff9604c46e6815e87d854f"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.775096 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6913d318e7ba92353912b3906f9d06112d1a769806ff9604c46e6815e87d854f" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.778920 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f78987f9b-lzmw8" event={"ID":"6dc6534b-d5ec-4c53-bfc1-aae2389e3755","Type":"ContainerStarted","Data":"d542ece6315fc2b7d40f42b68cd715beb917e37c0786859ba8e0b40c6b5a108f"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.778990 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7f78987f9b-lzmw8" event={"ID":"6dc6534b-d5ec-4c53-bfc1-aae2389e3755","Type":"ContainerStarted","Data":"98a6fc2fc04883b13afcb9bc4793ed6876d7660e36bbd6bdc18b229ed721fe2f"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.790062 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5f95d46d88-tlzqg" podStartSLOduration=14.789968411 podStartE2EDuration="14.789968411s" podCreationTimestamp="2026-02-27 08:51:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:14.78344853 +0000 UTC m=+1373.177850150" watchObservedRunningTime="2026-02-27 08:51:14.789968411 +0000 UTC m=+1373.184370021" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.791202 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-796c549d8f-qpw49" event={"ID":"1774e857-2c80-489f-8985-11398d1727be","Type":"ContainerStarted","Data":"a9b7c59dc7f7cbaab6d895a74169783cee5285c4f1acd276d3608e38ff001f94"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.791611 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.801527 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-98c78d5f8-j9wmn" event={"ID":"e6ab1c10-b552-4a69-94c7-68280ab7e126","Type":"ContainerStarted","Data":"d1136f91f3db289760b67bf78418a2809b37b6cc1c92b62bf1341729971407ed"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.801607 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-98c78d5f8-j9wmn" event={"ID":"e6ab1c10-b552-4a69-94c7-68280ab7e126","Type":"ContainerStarted","Data":"ede6b37c237a95667008a06057fd933008e9e665abcee2a3c2a28ce6a6594391"} Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.807586 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="ceilometer-notification-agent" containerID="cri-o://b6d1f7754d20d1bbedc3a306957ed20f39c9658c5e93b3850ede31e6b6d7a451" gracePeriod=30 Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.807734 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-d4fhc" event={"ID":"b36228db-b66d-4815-ac1c-e58b85ee3bbf","Type":"ContainerStarted","Data":"60cfeacf822988c4a0a71e9f4902d3c855faa08dabb8cce270594b0a54784f19"} Feb 27 
08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.808777 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="sg-core" containerID="cri-o://c008eba5964801287e746052e23aec6b88482120d86e46da7b4d9250c8f87062" gracePeriod=30 Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.826160 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7f78987f9b-lzmw8" podStartSLOduration=2.674646935 podStartE2EDuration="53.82613078s" podCreationTimestamp="2026-02-27 08:50:21 +0000 UTC" firstStartedPulling="2026-02-27 08:50:22.852950781 +0000 UTC m=+1321.247352391" lastFinishedPulling="2026-02-27 08:51:14.004434626 +0000 UTC m=+1372.398836236" observedRunningTime="2026-02-27 08:51:14.817928275 +0000 UTC m=+1373.212329885" watchObservedRunningTime="2026-02-27 08:51:14.82613078 +0000 UTC m=+1373.220532380" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.862667 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-59d7bdb8d4-m9nsb" podStartSLOduration=9.862641438 podStartE2EDuration="9.862641438s" podCreationTimestamp="2026-02-27 08:51:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:14.842690974 +0000 UTC m=+1373.237092594" watchObservedRunningTime="2026-02-27 08:51:14.862641438 +0000 UTC m=+1373.257043048" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.877193 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-d4fhc" podStartSLOduration=2.737247015 podStartE2EDuration="1m1.877164389s" podCreationTimestamp="2026-02-27 08:50:13 +0000 UTC" firstStartedPulling="2026-02-27 08:50:14.894215381 +0000 UTC m=+1313.288616991" lastFinishedPulling="2026-02-27 08:51:14.034132765 +0000 UTC m=+1372.428534365" observedRunningTime="2026-02-27 08:51:14.874026586 +0000 UTC m=+1373.268428196" watchObservedRunningTime="2026-02-27 08:51:14.877164389 +0000 UTC m=+1373.271566019" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.959092 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-98c78d5f8-j9wmn" podStartSLOduration=2.664191432 podStartE2EDuration="53.959072597s" podCreationTimestamp="2026-02-27 08:50:21 +0000 UTC" firstStartedPulling="2026-02-27 08:50:22.709818278 +0000 UTC m=+1321.104219888" lastFinishedPulling="2026-02-27 08:51:14.004699433 +0000 UTC m=+1372.399101053" observedRunningTime="2026-02-27 08:51:14.958539313 +0000 UTC m=+1373.352940923" watchObservedRunningTime="2026-02-27 08:51:14.959072597 +0000 UTC m=+1373.353474207" Feb 27 08:51:14 crc kubenswrapper[4906]: I0227 08:51:14.993322 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-796c549d8f-qpw49" podStartSLOduration=11.993293435 podStartE2EDuration="11.993293435s" podCreationTimestamp="2026-02-27 08:51:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:14.981023643 +0000 UTC m=+1373.375425253" watchObservedRunningTime="2026-02-27 08:51:14.993293435 +0000 UTC m=+1373.387695045" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.650963 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54b684dc7c-bw6mb"] Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 
08:51:15.651475 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerName="dnsmasq-dns" containerID="cri-o://680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f" gracePeriod=10 Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.653034 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.740035 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-fb745b69-c9pjn"] Feb 27 08:51:15 crc kubenswrapper[4906]: E0227 08:51:15.740517 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" containerName="glance-db-sync" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.740542 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" containerName="glance-db-sync" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.740727 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" containerName="glance-db-sync" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.741801 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.778616 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fb745b69-c9pjn"] Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.851979 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-nb\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.852109 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-dns-svc\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.852137 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bllzs\" (UniqueName: \"kubernetes.io/projected/573121fa-4108-4583-9df9-cab33de5b148-kube-api-access-bllzs\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.852174 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-sb\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.852220 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-config\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " 
pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.877490 4906 generic.go:334] "Generic (PLEG): container finished" podID="30498343-6254-49c2-8220-9df92217cb8f" containerID="c008eba5964801287e746052e23aec6b88482120d86e46da7b4d9250c8f87062" exitCode=2 Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.877630 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30498343-6254-49c2-8220-9df92217cb8f","Type":"ContainerDied","Data":"c008eba5964801287e746052e23aec6b88482120d86e46da7b4d9250c8f87062"} Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.887062 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pg9nd" event={"ID":"76d04662-7576-4f57-aca2-e118e5efd771","Type":"ContainerStarted","Data":"7afff095932d9429eea67d34bf6f9ac2503e2c1396391f4b0d1fa256b46e910f"} Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.887108 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.956800 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-dns-svc\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.956904 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bllzs\" (UniqueName: \"kubernetes.io/projected/573121fa-4108-4583-9df9-cab33de5b148-kube-api-access-bllzs\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.957177 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-sb\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.957346 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-config\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.957440 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-nb\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.961471 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-nb\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.966634 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-dns-svc\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.968841 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-sb\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.969926 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-config\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:15 crc kubenswrapper[4906]: I0227 08:51:15.979714 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-pg9nd" podStartSLOduration=3.937856908 podStartE2EDuration="1m2.979688229s" podCreationTimestamp="2026-02-27 08:50:13 +0000 UTC" firstStartedPulling="2026-02-27 08:50:14.962682567 +0000 UTC m=+1313.357084177" lastFinishedPulling="2026-02-27 08:51:14.004513888 +0000 UTC m=+1372.398915498" observedRunningTime="2026-02-27 08:51:15.960139306 +0000 UTC m=+1374.354540926" watchObservedRunningTime="2026-02-27 08:51:15.979688229 +0000 UTC m=+1374.374089839" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.010751 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bllzs\" (UniqueName: \"kubernetes.io/projected/573121fa-4108-4583-9df9-cab33de5b148-kube-api-access-bllzs\") pod \"dnsmasq-dns-fb745b69-c9pjn\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.068114 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.130603 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.153:5353: connect: connection refused" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.406145 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.412957 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.417277 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rc48q" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.417987 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.418352 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.430888 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.474940 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.475023 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.475076 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.475121 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-logs\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.475208 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-config-data\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.475697 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-scripts\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.475806 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8v9n\" (UniqueName: \"kubernetes.io/projected/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-kube-api-access-h8v9n\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " 
pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.579414 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-config-data\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.580962 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-scripts\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.581108 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8v9n\" (UniqueName: \"kubernetes.io/projected/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-kube-api-access-h8v9n\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.581317 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.581439 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.581566 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.581714 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-logs\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.582224 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.582399 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-logs\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.582484 
4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.590462 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-scripts\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.612860 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.624798 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-config-data\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.632732 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8v9n\" (UniqueName: \"kubernetes.io/projected/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-kube-api-access-h8v9n\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.654894 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.758271 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.767431 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.784649 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm97c\" (UniqueName: \"kubernetes.io/projected/95b14dbd-64e2-49e4-94b2-90b8dbd96242-kube-api-access-fm97c\") pod \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.784744 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-sb\") pod \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.784802 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-nb\") pod \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.784866 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-dns-svc\") pod \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.784950 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-config\") pod \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\" (UID: \"95b14dbd-64e2-49e4-94b2-90b8dbd96242\") " Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.817286 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/95b14dbd-64e2-49e4-94b2-90b8dbd96242-kube-api-access-fm97c" (OuterVolumeSpecName: "kube-api-access-fm97c") pod "95b14dbd-64e2-49e4-94b2-90b8dbd96242" (UID: "95b14dbd-64e2-49e4-94b2-90b8dbd96242"). InnerVolumeSpecName "kube-api-access-fm97c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.865842 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-fb745b69-c9pjn"] Feb 27 08:51:16 crc kubenswrapper[4906]: W0227 08:51:16.881061 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod573121fa_4108_4583_9df9_cab33de5b148.slice/crio-a5ca13e1c18a9c1b1d5b3506f745f7d7be8703a9c703aaae37e3da29d5c6c78e WatchSource:0}: Error finding container a5ca13e1c18a9c1b1d5b3506f745f7d7be8703a9c703aaae37e3da29d5c6c78e: Status 404 returned error can't find the container with id a5ca13e1c18a9c1b1d5b3506f745f7d7be8703a9c703aaae37e3da29d5c6c78e Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.886961 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-config" (OuterVolumeSpecName: "config") pod "95b14dbd-64e2-49e4-94b2-90b8dbd96242" (UID: "95b14dbd-64e2-49e4-94b2-90b8dbd96242"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.889075 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm97c\" (UniqueName: \"kubernetes.io/projected/95b14dbd-64e2-49e4-94b2-90b8dbd96242-kube-api-access-fm97c\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.890680 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.939272 4906 generic.go:334] "Generic (PLEG): container finished" podID="30498343-6254-49c2-8220-9df92217cb8f" containerID="b6d1f7754d20d1bbedc3a306957ed20f39c9658c5e93b3850ede31e6b6d7a451" exitCode=0 Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.939433 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30498343-6254-49c2-8220-9df92217cb8f","Type":"ContainerDied","Data":"b6d1f7754d20d1bbedc3a306957ed20f39c9658c5e93b3850ede31e6b6d7a451"} Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.956483 4906 generic.go:334] "Generic (PLEG): container finished" podID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerID="680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f" exitCode=0 Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.956633 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" event={"ID":"95b14dbd-64e2-49e4-94b2-90b8dbd96242","Type":"ContainerDied","Data":"680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f"} Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.956671 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" event={"ID":"95b14dbd-64e2-49e4-94b2-90b8dbd96242","Type":"ContainerDied","Data":"11f3fd959815705e646d820b7a868c803e8e42580ecf442453f7c8a0f09e7b10"} Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.956689 4906 scope.go:117] "RemoveContainer" containerID="680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.956854 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54b684dc7c-bw6mb" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.963427 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "95b14dbd-64e2-49e4-94b2-90b8dbd96242" (UID: "95b14dbd-64e2-49e4-94b2-90b8dbd96242"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.974154 4906 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.975075 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" event={"ID":"573121fa-4108-4583-9df9-cab33de5b148","Type":"ContainerStarted","Data":"a5ca13e1c18a9c1b1d5b3506f745f7d7be8703a9c703aaae37e3da29d5c6c78e"} Feb 27 08:51:16 crc kubenswrapper[4906]: I0227 08:51:16.993368 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.023752 4906 scope.go:117] "RemoveContainer" containerID="cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.049343 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "95b14dbd-64e2-49e4-94b2-90b8dbd96242" (UID: "95b14dbd-64e2-49e4-94b2-90b8dbd96242"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.050315 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "95b14dbd-64e2-49e4-94b2-90b8dbd96242" (UID: "95b14dbd-64e2-49e4-94b2-90b8dbd96242"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.060744 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:17 crc kubenswrapper[4906]: E0227 08:51:17.061221 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerName="init" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.061242 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerName="init" Feb 27 08:51:17 crc kubenswrapper[4906]: E0227 08:51:17.061263 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerName="dnsmasq-dns" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.061270 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerName="dnsmasq-dns" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.061453 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" containerName="dnsmasq-dns" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.062476 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.070014 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.095558 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.095664 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/95b14dbd-64e2-49e4-94b2-90b8dbd96242-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.097683 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.197221 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.197281 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-logs\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.197306 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.197322 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.197341 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.197395 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf7gm\" (UniqueName: \"kubernetes.io/projected/47a1c965-c04a-4804-bcc8-067fd0235ac2-kube-api-access-bf7gm\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.197435 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.204034 4906 scope.go:117] "RemoveContainer" containerID="680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f" Feb 27 08:51:17 crc kubenswrapper[4906]: E0227 08:51:17.206413 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f\": container with ID starting with 680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f not found: ID does not exist" containerID="680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.206451 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f"} err="failed to get container status \"680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f\": rpc error: code = NotFound desc = could not find container \"680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f\": container with ID starting with 680b769739629cc73e39897564bb3a93e5915b8d7ea08206e9c98908f3525a1f not found: ID does not exist" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.206475 4906 scope.go:117] "RemoveContainer" containerID="cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed" Feb 27 08:51:17 crc kubenswrapper[4906]: E0227 08:51:17.210081 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed\": container with ID starting with cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed not found: ID does not exist" containerID="cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.210132 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed"} err="failed to get container status \"cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed\": rpc error: code = NotFound desc = could not find container \"cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed\": container with ID starting with cbef2bbfc4fcd494d2453c52a9afafaadcee24e8f20a47c47c716ce4dc86d1ed not found: ID does not exist" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.291973 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-54b684dc7c-bw6mb"] Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.299517 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.299589 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-logs\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " 
pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.299635 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.299657 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.299680 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.299765 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf7gm\" (UniqueName: \"kubernetes.io/projected/47a1c965-c04a-4804-bcc8-067fd0235ac2-kube-api-access-bf7gm\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.299832 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.300272 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-54b684dc7c-bw6mb"] Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.300676 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.302741 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-logs\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.303061 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.313075 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-combined-ca-bundle\") pod 
\"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.323245 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-config-data\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.323733 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-scripts\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.357247 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf7gm\" (UniqueName: \"kubernetes.io/projected/47a1c965-c04a-4804-bcc8-067fd0235ac2-kube-api-access-bf7gm\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.383417 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.471062 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.471722 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.561988 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.605797 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-run-httpd\") pod \"30498343-6254-49c2-8220-9df92217cb8f\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.605865 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-scripts\") pod \"30498343-6254-49c2-8220-9df92217cb8f\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.606038 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ldhgk\" (UniqueName: \"kubernetes.io/projected/30498343-6254-49c2-8220-9df92217cb8f-kube-api-access-ldhgk\") pod \"30498343-6254-49c2-8220-9df92217cb8f\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.606437 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "30498343-6254-49c2-8220-9df92217cb8f" (UID: "30498343-6254-49c2-8220-9df92217cb8f"). 
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.610385 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-config-data\") pod \"30498343-6254-49c2-8220-9df92217cb8f\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.610592 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-log-httpd\") pod \"30498343-6254-49c2-8220-9df92217cb8f\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.610721 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-sg-core-conf-yaml\") pod \"30498343-6254-49c2-8220-9df92217cb8f\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.610742 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-combined-ca-bundle\") pod \"30498343-6254-49c2-8220-9df92217cb8f\" (UID: \"30498343-6254-49c2-8220-9df92217cb8f\") " Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.611422 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.630788 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "30498343-6254-49c2-8220-9df92217cb8f" (UID: "30498343-6254-49c2-8220-9df92217cb8f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.635376 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30498343-6254-49c2-8220-9df92217cb8f-kube-api-access-ldhgk" (OuterVolumeSpecName: "kube-api-access-ldhgk") pod "30498343-6254-49c2-8220-9df92217cb8f" (UID: "30498343-6254-49c2-8220-9df92217cb8f"). InnerVolumeSpecName "kube-api-access-ldhgk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.635923 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-scripts" (OuterVolumeSpecName: "scripts") pod "30498343-6254-49c2-8220-9df92217cb8f" (UID: "30498343-6254-49c2-8220-9df92217cb8f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.671984 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "30498343-6254-49c2-8220-9df92217cb8f" (UID: "30498343-6254-49c2-8220-9df92217cb8f"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.690846 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-config-data" (OuterVolumeSpecName: "config-data") pod "30498343-6254-49c2-8220-9df92217cb8f" (UID: "30498343-6254-49c2-8220-9df92217cb8f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.717287 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/30498343-6254-49c2-8220-9df92217cb8f-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.717324 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.717336 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.717345 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ldhgk\" (UniqueName: \"kubernetes.io/projected/30498343-6254-49c2-8220-9df92217cb8f-kube-api-access-ldhgk\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.717353 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.717449 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "30498343-6254-49c2-8220-9df92217cb8f" (UID: "30498343-6254-49c2-8220-9df92217cb8f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.797257 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:17 crc kubenswrapper[4906]: W0227 08:51:17.802472 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f7fb2c4_3404_4a84_9c80_c20f3eb7539d.slice/crio-535f930f249c0d0436175662ff386714f9e592e5a15366429bc9cd6801fa423e WatchSource:0}: Error finding container 535f930f249c0d0436175662ff386714f9e592e5a15366429bc9cd6801fa423e: Status 404 returned error can't find the container with id 535f930f249c0d0436175662ff386714f9e592e5a15366429bc9cd6801fa423e Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.834164 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/30498343-6254-49c2-8220-9df92217cb8f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.995568 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"30498343-6254-49c2-8220-9df92217cb8f","Type":"ContainerDied","Data":"71d11135b35f0718165afa91c8a2ba7b84e21ffd73415381c3a8dcdfaa174790"} Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.995640 4906 scope.go:117] "RemoveContainer" containerID="c008eba5964801287e746052e23aec6b88482120d86e46da7b4d9250c8f87062" Feb 27 08:51:17 crc kubenswrapper[4906]: I0227 08:51:17.995798 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.018644 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d","Type":"ContainerStarted","Data":"535f930f249c0d0436175662ff386714f9e592e5a15366429bc9cd6801fa423e"} Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.030476 4906 generic.go:334] "Generic (PLEG): container finished" podID="573121fa-4108-4583-9df9-cab33de5b148" containerID="bfbbc307e6deb9674dfad51046e7390c52a99e8d747a93daa39f174265db4819" exitCode=0 Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.032057 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" event={"ID":"573121fa-4108-4583-9df9-cab33de5b148","Type":"ContainerDied","Data":"bfbbc307e6deb9674dfad51046e7390c52a99e8d747a93daa39f174265db4819"} Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.209131 4906 scope.go:117] "RemoveContainer" containerID="b6d1f7754d20d1bbedc3a306957ed20f39c9658c5e93b3850ede31e6b6d7a451" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.279860 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.303011 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.328237 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:18 crc kubenswrapper[4906]: E0227 08:51:18.329402 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="ceilometer-notification-agent" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.329423 4906 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="ceilometer-notification-agent" Feb 27 08:51:18 crc kubenswrapper[4906]: E0227 08:51:18.329446 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="sg-core" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.329452 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="sg-core" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.331012 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="sg-core" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.331044 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="30498343-6254-49c2-8220-9df92217cb8f" containerName="ceilometer-notification-agent" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.333253 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.340044 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.340297 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.348735 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.381337 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-config-data\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.381435 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-log-httpd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.381495 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-run-httpd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.381577 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-scripts\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.381771 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.382032 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-tx6vd\" (UniqueName: \"kubernetes.io/projected/54ba4949-578e-4c0a-94d2-0add9be8821d-kube-api-access-tx6vd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.382127 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.417929 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.483609 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-log-httpd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.483713 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-run-httpd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.483764 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-scripts\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.483816 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.483857 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tx6vd\" (UniqueName: \"kubernetes.io/projected/54ba4949-578e-4c0a-94d2-0add9be8821d-kube-api-access-tx6vd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.483921 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.483943 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-config-data\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.487403 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-log-httpd\") pod \"ceilometer-0\" (UID: 
\"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.487825 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-run-httpd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.492853 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-config-data\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.493988 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.494235 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-scripts\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.495376 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.506621 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tx6vd\" (UniqueName: \"kubernetes.io/projected/54ba4949-578e-4c0a-94d2-0add9be8821d-kube-api-access-tx6vd\") pod \"ceilometer-0\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.572860 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30498343-6254-49c2-8220-9df92217cb8f" path="/var/lib/kubelet/pods/30498343-6254-49c2-8220-9df92217cb8f/volumes" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.576083 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="95b14dbd-64e2-49e4-94b2-90b8dbd96242" path="/var/lib/kubelet/pods/95b14dbd-64e2-49e4-94b2-90b8dbd96242/volumes" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.663456 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.795968 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.802201 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/c98486bd-1325-4072-bce0-a28d38ecead2-etc-swift\") pod \"swift-storage-0\" (UID: \"c98486bd-1325-4072-bce0-a28d38ecead2\") " pod="openstack/swift-storage-0" Feb 27 08:51:18 crc kubenswrapper[4906]: I0227 08:51:18.845511 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.045296 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"47a1c965-c04a-4804-bcc8-067fd0235ac2","Type":"ContainerStarted","Data":"83faa194da7206d1279298b587e70eb59c465d16200d9051cc553b5e4ed12263"} Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.048810 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" event={"ID":"573121fa-4108-4583-9df9-cab33de5b148","Type":"ContainerStarted","Data":"95e95a754faab71e5c8c6ae374038de1a613ad1a7d9edf0a1d1eb41fe3601bcc"} Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.050067 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.105785 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" podStartSLOduration=4.105760957 podStartE2EDuration="4.105760957s" podCreationTimestamp="2026-02-27 08:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:19.079364604 +0000 UTC m=+1377.473766214" watchObservedRunningTime="2026-02-27 08:51:19.105760957 +0000 UTC m=+1377.500162567" Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.212686 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.274767 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:19 crc kubenswrapper[4906]: W0227 08:51:19.283853 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod54ba4949_578e_4c0a_94d2_0add9be8821d.slice/crio-32769346475cf4bc41ee2a3b882eb982b137ea8d0e63ea1764dcffa7360cdf70 WatchSource:0}: Error finding container 32769346475cf4bc41ee2a3b882eb982b137ea8d0e63ea1764dcffa7360cdf70: Status 404 returned error can't find the container with id 32769346475cf4bc41ee2a3b882eb982b137ea8d0e63ea1764dcffa7360cdf70 Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.340710 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:19 crc kubenswrapper[4906]: I0227 08:51:19.614924 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Feb 27 08:51:20 crc kubenswrapper[4906]: I0227 08:51:20.071047 4906 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d","Type":"ContainerStarted","Data":"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020"} Feb 27 08:51:20 crc kubenswrapper[4906]: I0227 08:51:20.074320 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"47a1c965-c04a-4804-bcc8-067fd0235ac2","Type":"ContainerStarted","Data":"7f9a0ff689aa6a3d888119c22f8c2a208c007d98729b9f356cefb94b093e9ea0"} Feb 27 08:51:20 crc kubenswrapper[4906]: I0227 08:51:20.076291 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerStarted","Data":"32769346475cf4bc41ee2a3b882eb982b137ea8d0e63ea1764dcffa7360cdf70"} Feb 27 08:51:20 crc kubenswrapper[4906]: I0227 08:51:20.078673 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"47cd77689629d87db6372f7d854e7812b986a59f7014ceadef5dd67844977643"} Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.096277 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"47a1c965-c04a-4804-bcc8-067fd0235ac2","Type":"ContainerStarted","Data":"0fde501ab8d884abe10c928bd413fc198c0e15d1f8ae23b2bd1cad23a8fadec0"} Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.096491 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-log" containerID="cri-o://7f9a0ff689aa6a3d888119c22f8c2a208c007d98729b9f356cefb94b093e9ea0" gracePeriod=30 Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.096569 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-httpd" containerID="cri-o://0fde501ab8d884abe10c928bd413fc198c0e15d1f8ae23b2bd1cad23a8fadec0" gracePeriod=30 Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.108284 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerStarted","Data":"d3eee1edec5c66b8097262495ce3f3fd6e5efbf8bebc77452174e5b6cf36c899"} Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.123650 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d","Type":"ContainerStarted","Data":"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9"} Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.124276 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-log" containerID="cri-o://3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020" gracePeriod=30 Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.124464 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-httpd" containerID="cri-o://c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9" gracePeriod=30 Feb 27 08:51:21 crc 
kubenswrapper[4906]: I0227 08:51:21.138981 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.138950408 podStartE2EDuration="5.138950408s" podCreationTimestamp="2026-02-27 08:51:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:21.132043327 +0000 UTC m=+1379.526444967" watchObservedRunningTime="2026-02-27 08:51:21.138950408 +0000 UTC m=+1379.533352018" Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.162554 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.162525976 podStartE2EDuration="6.162525976s" podCreationTimestamp="2026-02-27 08:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:21.158647694 +0000 UTC m=+1379.553049294" watchObservedRunningTime="2026-02-27 08:51:21.162525976 +0000 UTC m=+1379.556927586" Feb 27 08:51:21 crc kubenswrapper[4906]: I0227 08:51:21.934998 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.086619 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-logs\") pod \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.090056 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.090319 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-config-data\") pod \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.090371 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-httpd-run\") pod \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.090614 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-combined-ca-bundle\") pod \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.090661 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-scripts\") pod \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.090684 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8v9n\" (UniqueName: 
\"kubernetes.io/projected/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-kube-api-access-h8v9n\") pod \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\" (UID: \"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.095908 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" (UID: "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.114954 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-logs" (OuterVolumeSpecName: "logs") pod "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" (UID: "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.143103 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" (UID: "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.149190 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-scripts" (OuterVolumeSpecName: "scripts") pod "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" (UID: "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.150016 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-kube-api-access-h8v9n" (OuterVolumeSpecName: "kube-api-access-h8v9n") pod "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" (UID: "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"). InnerVolumeSpecName "kube-api-access-h8v9n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.189117 4906 generic.go:334] "Generic (PLEG): container finished" podID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerID="0fde501ab8d884abe10c928bd413fc198c0e15d1f8ae23b2bd1cad23a8fadec0" exitCode=0 Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.189174 4906 generic.go:334] "Generic (PLEG): container finished" podID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerID="7f9a0ff689aa6a3d888119c22f8c2a208c007d98729b9f356cefb94b093e9ea0" exitCode=143 Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.189626 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"47a1c965-c04a-4804-bcc8-067fd0235ac2","Type":"ContainerDied","Data":"0fde501ab8d884abe10c928bd413fc198c0e15d1f8ae23b2bd1cad23a8fadec0"} Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.189734 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"47a1c965-c04a-4804-bcc8-067fd0235ac2","Type":"ContainerDied","Data":"7f9a0ff689aa6a3d888119c22f8c2a208c007d98729b9f356cefb94b093e9ea0"} Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.194899 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.194964 4906 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.194975 4906 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.194986 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.194995 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8v9n\" (UniqueName: \"kubernetes.io/projected/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-kube-api-access-h8v9n\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.198963 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.199159 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.213234 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerStarted","Data":"daeb5e1e83b53f1af0f9d3ed986190ba46ac2bce85e21b6fa6429b1f1a0cfda1"} Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.213784 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" (UID: "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.233242 4906 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.245684 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"dad3534a178ee9f8978d0121e2ec6d31d41ecb29cc347c9c800a3b87fbd631b7"} Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.252856 4906 generic.go:334] "Generic (PLEG): container finished" podID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerID="c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9" exitCode=0 Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.252912 4906 generic.go:334] "Generic (PLEG): container finished" podID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerID="3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020" exitCode=143 Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.252935 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d","Type":"ContainerDied","Data":"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9"} Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.252967 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d","Type":"ContainerDied","Data":"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020"} Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.252977 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8f7fb2c4-3404-4a84-9c80-c20f3eb7539d","Type":"ContainerDied","Data":"535f930f249c0d0436175662ff386714f9e592e5a15366429bc9cd6801fa423e"} Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.253006 4906 scope.go:117] "RemoveContainer" containerID="c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.253152 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.259638 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-config-data" (OuterVolumeSpecName: "config-data") pod "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" (UID: "8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.296974 4906 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.297401 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.297473 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.308992 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.309738 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.430367 4906 scope.go:117] "RemoveContainer" containerID="3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.441654 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.496059 4906 scope.go:117] "RemoveContainer" containerID="c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.500595 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-httpd-run\") pod \"47a1c965-c04a-4804-bcc8-067fd0235ac2\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.500646 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-combined-ca-bundle\") pod \"47a1c965-c04a-4804-bcc8-067fd0235ac2\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.500751 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-logs\") pod \"47a1c965-c04a-4804-bcc8-067fd0235ac2\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.500769 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-config-data\") pod \"47a1c965-c04a-4804-bcc8-067fd0235ac2\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.500869 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-scripts\") pod \"47a1c965-c04a-4804-bcc8-067fd0235ac2\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.500979 4906 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"47a1c965-c04a-4804-bcc8-067fd0235ac2\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.501011 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf7gm\" (UniqueName: \"kubernetes.io/projected/47a1c965-c04a-4804-bcc8-067fd0235ac2-kube-api-access-bf7gm\") pod \"47a1c965-c04a-4804-bcc8-067fd0235ac2\" (UID: \"47a1c965-c04a-4804-bcc8-067fd0235ac2\") " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.501352 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "47a1c965-c04a-4804-bcc8-067fd0235ac2" (UID: "47a1c965-c04a-4804-bcc8-067fd0235ac2"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.501722 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-logs" (OuterVolumeSpecName: "logs") pod "47a1c965-c04a-4804-bcc8-067fd0235ac2" (UID: "47a1c965-c04a-4804-bcc8-067fd0235ac2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.501987 4906 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.502007 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a1c965-c04a-4804-bcc8-067fd0235ac2-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: E0227 08:51:22.506151 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9\": container with ID starting with c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9 not found: ID does not exist" containerID="c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.506248 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9"} err="failed to get container status \"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9\": rpc error: code = NotFound desc = could not find container \"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9\": container with ID starting with c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9 not found: ID does not exist" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.506288 4906 scope.go:117] "RemoveContainer" containerID="3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020" Feb 27 08:51:22 crc kubenswrapper[4906]: E0227 08:51:22.508264 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020\": container with ID starting with 3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020 not found: ID does not 
exist" containerID="3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.508313 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020"} err="failed to get container status \"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020\": rpc error: code = NotFound desc = could not find container \"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020\": container with ID starting with 3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020 not found: ID does not exist" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.508344 4906 scope.go:117] "RemoveContainer" containerID="c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.509379 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9"} err="failed to get container status \"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9\": rpc error: code = NotFound desc = could not find container \"c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9\": container with ID starting with c2c5d2a921de426ea19cb191d9327ef79a0a4d59ee6e336a5688af0c0c68a9d9 not found: ID does not exist" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.509461 4906 scope.go:117] "RemoveContainer" containerID="3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.510292 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020"} err="failed to get container status \"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020\": rpc error: code = NotFound desc = could not find container \"3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020\": container with ID starting with 3c6179c3ca2890e438bb7820de44485d5d610e0899415eddb16ef6a40dbcb020 not found: ID does not exist" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.526008 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47a1c965-c04a-4804-bcc8-067fd0235ac2-kube-api-access-bf7gm" (OuterVolumeSpecName: "kube-api-access-bf7gm") pod "47a1c965-c04a-4804-bcc8-067fd0235ac2" (UID: "47a1c965-c04a-4804-bcc8-067fd0235ac2"). InnerVolumeSpecName "kube-api-access-bf7gm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.527495 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-scripts" (OuterVolumeSpecName: "scripts") pod "47a1c965-c04a-4804-bcc8-067fd0235ac2" (UID: "47a1c965-c04a-4804-bcc8-067fd0235ac2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.528842 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "47a1c965-c04a-4804-bcc8-067fd0235ac2" (UID: "47a1c965-c04a-4804-bcc8-067fd0235ac2"). InnerVolumeSpecName "local-storage08-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.606383 4906 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.606793 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf7gm\" (UniqueName: \"kubernetes.io/projected/47a1c965-c04a-4804-bcc8-067fd0235ac2-kube-api-access-bf7gm\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.606937 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.755596 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47a1c965-c04a-4804-bcc8-067fd0235ac2" (UID: "47a1c965-c04a-4804-bcc8-067fd0235ac2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.759826 4906 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.767686 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-config-data" (OuterVolumeSpecName: "config-data") pod "47a1c965-c04a-4804-bcc8-067fd0235ac2" (UID: "47a1c965-c04a-4804-bcc8-067fd0235ac2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.810854 4906 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.810905 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:22 crc kubenswrapper[4906]: I0227 08:51:22.810917 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a1c965-c04a-4804-bcc8-067fd0235ac2-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.266671 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"47a1c965-c04a-4804-bcc8-067fd0235ac2","Type":"ContainerDied","Data":"83faa194da7206d1279298b587e70eb59c465d16200d9051cc553b5e4ed12263"} Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.267099 4906 scope.go:117] "RemoveContainer" containerID="0fde501ab8d884abe10c928bd413fc198c0e15d1f8ae23b2bd1cad23a8fadec0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.267117 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.270952 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerStarted","Data":"dcbca9bbfa08ac23a876f9d1e338833dc5a2ee1265b1b915283015e7b3f0c54d"} Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.279278 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"7fe4b0a0f9ab9048ff309c6dbe8414734c4e5004872c6429d2235afe206c6d8b"} Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.279357 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"f5a233a910d5db00ba868c55d300d84dd164100ef982cddd6a65c47eb2915067"} Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.279368 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"9089f203037cb46600994dc238b0412ad747c060d191aa9ec55e2b6eed8a6dd4"} Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.294674 4906 scope.go:117] "RemoveContainer" containerID="7f9a0ff689aa6a3d888119c22f8c2a208c007d98729b9f356cefb94b093e9ea0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.310647 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.328979 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.352995 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:23 crc kubenswrapper[4906]: E0227 08:51:23.353418 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-log" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.353438 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-log" Feb 27 08:51:23 crc kubenswrapper[4906]: E0227 08:51:23.353461 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-log" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.353469 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-log" Feb 27 08:51:23 crc kubenswrapper[4906]: E0227 08:51:23.353476 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-httpd" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.353482 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-httpd" Feb 27 08:51:23 crc kubenswrapper[4906]: E0227 08:51:23.353491 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-httpd" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.353497 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-httpd" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 
08:51:23.353669 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-httpd" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.353686 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-log" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.353697 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" containerName="glance-httpd" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.353704 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" containerName="glance-log" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.357680 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.368276 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.368376 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-rc48q" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.369119 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.369421 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.377854 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423114 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-logs\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423168 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423255 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-scripts\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423298 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423342 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-xtmcc\" (UniqueName: \"kubernetes.io/projected/92423fdd-529f-46e5-8eff-6241f4a41225-kube-api-access-xtmcc\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423363 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423396 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-config-data\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.423424 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.525137 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-scripts\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.525529 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.525629 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtmcc\" (UniqueName: \"kubernetes.io/projected/92423fdd-529f-46e5-8eff-6241f4a41225-kube-api-access-xtmcc\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.525724 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.525825 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-config-data\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.525939 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.526096 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-logs\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.526181 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.527035 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.528463 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.528949 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-logs\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.538093 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-scripts\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.541191 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-config-data\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.544122 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.549701 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-internal-tls-certs\") pod 
\"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.550012 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtmcc\" (UniqueName: \"kubernetes.io/projected/92423fdd-529f-46e5-8eff-6241f4a41225-kube-api-access-xtmcc\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.572143 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:51:23 crc kubenswrapper[4906]: I0227 08:51:23.681513 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:24 crc kubenswrapper[4906]: I0227 08:51:24.286735 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:24 crc kubenswrapper[4906]: W0227 08:51:24.292193 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92423fdd_529f_46e5_8eff_6241f4a41225.slice/crio-41acc78ef1918a15d91766f8fe646264e7a418a42bb9f15ba1d5487c3fa09d72 WatchSource:0}: Error finding container 41acc78ef1918a15d91766f8fe646264e7a418a42bb9f15ba1d5487c3fa09d72: Status 404 returned error can't find the container with id 41acc78ef1918a15d91766f8fe646264e7a418a42bb9f15ba1d5487c3fa09d72 Feb 27 08:51:24 crc kubenswrapper[4906]: I0227 08:51:24.295672 4906 generic.go:334] "Generic (PLEG): container finished" podID="b36228db-b66d-4815-ac1c-e58b85ee3bbf" containerID="60cfeacf822988c4a0a71e9f4902d3c855faa08dabb8cce270594b0a54784f19" exitCode=0 Feb 27 08:51:24 crc kubenswrapper[4906]: I0227 08:51:24.295761 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-d4fhc" event={"ID":"b36228db-b66d-4815-ac1c-e58b85ee3bbf","Type":"ContainerDied","Data":"60cfeacf822988c4a0a71e9f4902d3c855faa08dabb8cce270594b0a54784f19"} Feb 27 08:51:24 crc kubenswrapper[4906]: I0227 08:51:24.566199 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47a1c965-c04a-4804-bcc8-067fd0235ac2" path="/var/lib/kubelet/pods/47a1c965-c04a-4804-bcc8-067fd0235ac2/volumes" Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.334500 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"92423fdd-529f-46e5-8eff-6241f4a41225","Type":"ContainerStarted","Data":"2a17575ed3c2abf6315d8714bb38ce68cde7100200e1d436b228688bf99d3aea"} Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.334822 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"92423fdd-529f-46e5-8eff-6241f4a41225","Type":"ContainerStarted","Data":"41acc78ef1918a15d91766f8fe646264e7a418a42bb9f15ba1d5487c3fa09d72"} Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.782805 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.887747 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-db-sync-config-data\") pod \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.887864 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjhlp\" (UniqueName: \"kubernetes.io/projected/b36228db-b66d-4815-ac1c-e58b85ee3bbf-kube-api-access-tjhlp\") pod \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.888012 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-combined-ca-bundle\") pod \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\" (UID: \"b36228db-b66d-4815-ac1c-e58b85ee3bbf\") " Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.893171 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b36228db-b66d-4815-ac1c-e58b85ee3bbf-kube-api-access-tjhlp" (OuterVolumeSpecName: "kube-api-access-tjhlp") pod "b36228db-b66d-4815-ac1c-e58b85ee3bbf" (UID: "b36228db-b66d-4815-ac1c-e58b85ee3bbf"). InnerVolumeSpecName "kube-api-access-tjhlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.893777 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "b36228db-b66d-4815-ac1c-e58b85ee3bbf" (UID: "b36228db-b66d-4815-ac1c-e58b85ee3bbf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.924124 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b36228db-b66d-4815-ac1c-e58b85ee3bbf" (UID: "b36228db-b66d-4815-ac1c-e58b85ee3bbf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.990774 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.991235 4906 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/b36228db-b66d-4815-ac1c-e58b85ee3bbf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:25 crc kubenswrapper[4906]: I0227 08:51:25.991248 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjhlp\" (UniqueName: \"kubernetes.io/projected/b36228db-b66d-4815-ac1c-e58b85ee3bbf-kube-api-access-tjhlp\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.071086 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.153570 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-89b8b"] Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.153954 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" podUID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerName="dnsmasq-dns" containerID="cri-o://bafe0dcaa7a2dc47acf5bf79d98ab0d7f301672314f840a0777763c8a01dc4ec" gracePeriod=10 Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.390532 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-d4fhc" event={"ID":"b36228db-b66d-4815-ac1c-e58b85ee3bbf","Type":"ContainerDied","Data":"8e1e49708351d22691bae6eb8e86b348618839dadb3cf168f6df8a05f6fd12b0"} Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.390581 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8e1e49708351d22691bae6eb8e86b348618839dadb3cf168f6df8a05f6fd12b0" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.390653 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-d4fhc" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.405130 4906 generic.go:334] "Generic (PLEG): container finished" podID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerID="bafe0dcaa7a2dc47acf5bf79d98ab0d7f301672314f840a0777763c8a01dc4ec" exitCode=0 Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.405197 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" event={"ID":"afee5cf7-dd4d-490f-be62-3fbea3170858","Type":"ContainerDied","Data":"bafe0dcaa7a2dc47acf5bf79d98ab0d7f301672314f840a0777763c8a01dc4ec"} Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.429498 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerStarted","Data":"2163e99c114663ba9c16529fefc749c69ad4a034148d9011a16364b105692df1"} Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.432045 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.488977 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"6cf45628f8c6e1ea3eb12c0089f214bd5c89fb29b41976a8023b2444a46689a1"} Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.489344 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"7ab8d91553a061063fcef218a5c5953db8d3395b957fdb6be57a5f6bddfce6d7"} Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.507717 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.187385238 podStartE2EDuration="8.507692002s" podCreationTimestamp="2026-02-27 08:51:18 +0000 UTC" firstStartedPulling="2026-02-27 08:51:19.287802442 +0000 UTC m=+1377.682204052" lastFinishedPulling="2026-02-27 08:51:25.608109206 +0000 UTC m=+1384.002510816" observedRunningTime="2026-02-27 08:51:26.457837045 +0000 UTC m=+1384.852238645" watchObservedRunningTime="2026-02-27 08:51:26.507692002 +0000 UTC m=+1384.902093612" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.643071 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-688678b65d-rntkp"] Feb 27 08:51:26 crc kubenswrapper[4906]: E0227 08:51:26.643610 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b36228db-b66d-4815-ac1c-e58b85ee3bbf" containerName="barbican-db-sync" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.643627 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b36228db-b66d-4815-ac1c-e58b85ee3bbf" containerName="barbican-db-sync" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.643810 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="b36228db-b66d-4815-ac1c-e58b85ee3bbf" containerName="barbican-db-sync" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.644906 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.668009 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.668327 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.668797 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-qfclq" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.669835 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-6c9bf8484c-xcvbs"] Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.672041 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.678280 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.731971 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-688678b65d-rntkp"] Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733536 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-config-data-custom\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733635 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3afec785-161c-4ca3-bc22-0c958826c2db-logs\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733660 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-config-data\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733691 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzhcm\" (UniqueName: \"kubernetes.io/projected/8ba40093-618b-4802-807d-91b1686f98c6-kube-api-access-bzhcm\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733716 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-combined-ca-bundle\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733744 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-config-data-custom\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733760 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnjfw\" (UniqueName: \"kubernetes.io/projected/3afec785-161c-4ca3-bc22-0c958826c2db-kube-api-access-cnjfw\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733792 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-config-data\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733815 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba40093-618b-4802-807d-91b1686f98c6-logs\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.733851 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-combined-ca-bundle\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.779746 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c9bf8484c-xcvbs"] Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.838564 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-combined-ca-bundle\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.838655 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-config-data-custom\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.838692 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnjfw\" (UniqueName: \"kubernetes.io/projected/3afec785-161c-4ca3-bc22-0c958826c2db-kube-api-access-cnjfw\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.838749 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-config-data\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.838782 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba40093-618b-4802-807d-91b1686f98c6-logs\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.838838 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-combined-ca-bundle\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.838899 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-config-data-custom\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.839038 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3afec785-161c-4ca3-bc22-0c958826c2db-logs\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.839090 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-config-data\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.839173 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzhcm\" (UniqueName: \"kubernetes.io/projected/8ba40093-618b-4802-807d-91b1686f98c6-kube-api-access-bzhcm\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.840610 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8ba40093-618b-4802-807d-91b1686f98c6-logs\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.846446 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3afec785-161c-4ca3-bc22-0c958826c2db-logs\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 
08:51:26.871651 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-combined-ca-bundle\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.885615 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-combined-ca-bundle\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.896093 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-config-data\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.901860 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3afec785-161c-4ca3-bc22-0c958826c2db-config-data-custom\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.912241 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnjfw\" (UniqueName: \"kubernetes.io/projected/3afec785-161c-4ca3-bc22-0c958826c2db-kube-api-access-cnjfw\") pod \"barbican-worker-6c9bf8484c-xcvbs\" (UID: \"3afec785-161c-4ca3-bc22-0c958826c2db\") " pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.913862 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-config-data\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.916280 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzhcm\" (UniqueName: \"kubernetes.io/projected/8ba40093-618b-4802-807d-91b1686f98c6-kube-api-access-bzhcm\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.927662 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ba40093-618b-4802-807d-91b1686f98c6-config-data-custom\") pod \"barbican-keystone-listener-688678b65d-rntkp\" (UID: \"8ba40093-618b-4802-807d-91b1686f98c6\") " pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.929944 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d649d8c65-kdhln"] Feb 27 08:51:26 crc kubenswrapper[4906]: I0227 08:51:26.931812 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.043284 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-688678b65d-rntkp" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.043470 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-6c9bf8484c-xcvbs" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.056791 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-nb\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.056873 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-config\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.056932 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-sb\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.057996 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-dns-svc\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.060949 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjg2g\" (UniqueName: \"kubernetes.io/projected/76948b3e-a832-499b-a662-49e3669030d4-kube-api-access-zjg2g\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.077196 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d649d8c65-kdhln"] Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.102027 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5dfbf7b87b-s7l5j"] Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.103781 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.110136 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.119212 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.157734 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dfbf7b87b-s7l5j"] Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166204 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-nb\") pod \"afee5cf7-dd4d-490f-be62-3fbea3170858\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166284 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-config\") pod \"afee5cf7-dd4d-490f-be62-3fbea3170858\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166316 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-dns-svc\") pod \"afee5cf7-dd4d-490f-be62-3fbea3170858\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166400 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-sb\") pod \"afee5cf7-dd4d-490f-be62-3fbea3170858\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166465 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9z95\" (UniqueName: \"kubernetes.io/projected/afee5cf7-dd4d-490f-be62-3fbea3170858-kube-api-access-x9z95\") pod \"afee5cf7-dd4d-490f-be62-3fbea3170858\" (UID: \"afee5cf7-dd4d-490f-be62-3fbea3170858\") " Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166764 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-combined-ca-bundle\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166857 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-nb\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166903 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-config\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166927 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-sb\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" 
Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166968 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvbpt\" (UniqueName: \"kubernetes.io/projected/f95124f9-c5e8-4445-9885-03cc47172f2f-kube-api-access-jvbpt\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.166995 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f95124f9-c5e8-4445-9885-03cc47172f2f-logs\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.167018 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-dns-svc\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.167040 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjg2g\" (UniqueName: \"kubernetes.io/projected/76948b3e-a832-499b-a662-49e3669030d4-kube-api-access-zjg2g\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.167067 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.167164 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data-custom\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.172734 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-sb\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.173914 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-dns-svc\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.174070 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-config\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc 
kubenswrapper[4906]: I0227 08:51:27.174698 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-nb\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.240174 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afee5cf7-dd4d-490f-be62-3fbea3170858-kube-api-access-x9z95" (OuterVolumeSpecName: "kube-api-access-x9z95") pod "afee5cf7-dd4d-490f-be62-3fbea3170858" (UID: "afee5cf7-dd4d-490f-be62-3fbea3170858"). InnerVolumeSpecName "kube-api-access-x9z95". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.250911 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjg2g\" (UniqueName: \"kubernetes.io/projected/76948b3e-a832-499b-a662-49e3669030d4-kube-api-access-zjg2g\") pod \"dnsmasq-dns-7d649d8c65-kdhln\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.268628 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data-custom\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.268703 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-combined-ca-bundle\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.268800 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvbpt\" (UniqueName: \"kubernetes.io/projected/f95124f9-c5e8-4445-9885-03cc47172f2f-kube-api-access-jvbpt\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.268835 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f95124f9-c5e8-4445-9885-03cc47172f2f-logs\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.268899 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.269010 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9z95\" (UniqueName: \"kubernetes.io/projected/afee5cf7-dd4d-490f-be62-3fbea3170858-kube-api-access-x9z95\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.270773 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f95124f9-c5e8-4445-9885-03cc47172f2f-logs\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.278458 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.293404 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data-custom\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.308268 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-combined-ca-bundle\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.315779 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvbpt\" (UniqueName: \"kubernetes.io/projected/f95124f9-c5e8-4445-9885-03cc47172f2f-kube-api-access-jvbpt\") pod \"barbican-api-5dfbf7b87b-s7l5j\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.326484 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "afee5cf7-dd4d-490f-be62-3fbea3170858" (UID: "afee5cf7-dd4d-490f-be62-3fbea3170858"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.330448 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "afee5cf7-dd4d-490f-be62-3fbea3170858" (UID: "afee5cf7-dd4d-490f-be62-3fbea3170858"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.332526 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "afee5cf7-dd4d-490f-be62-3fbea3170858" (UID: "afee5cf7-dd4d-490f-be62-3fbea3170858"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.354602 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-config" (OuterVolumeSpecName: "config") pod "afee5cf7-dd4d-490f-be62-3fbea3170858" (UID: "afee5cf7-dd4d-490f-be62-3fbea3170858"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.371229 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.371456 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.371635 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.371748 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/afee5cf7-dd4d-490f-be62-3fbea3170858-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.406438 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.470267 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.531526 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" event={"ID":"afee5cf7-dd4d-490f-be62-3fbea3170858","Type":"ContainerDied","Data":"ce16fc3b91c4b5ecb23564b9519ac082173980b1e9dc9ed756b246094a4e1a88"} Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.531611 4906 scope.go:117] "RemoveContainer" containerID="bafe0dcaa7a2dc47acf5bf79d98ab0d7f301672314f840a0777763c8a01dc4ec" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.531816 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-89b8b" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.561686 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"92423fdd-529f-46e5-8eff-6241f4a41225","Type":"ContainerStarted","Data":"3000d068480fff83d0eee45968fb5fc1136ad32fb16cf8a0e3502def1179fd9d"} Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.604484 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.60446182 podStartE2EDuration="4.60446182s" podCreationTimestamp="2026-02-27 08:51:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:27.594689014 +0000 UTC m=+1385.989090644" watchObservedRunningTime="2026-02-27 08:51:27.60446182 +0000 UTC m=+1385.998863430" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.680063 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-89b8b"] Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.682603 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-89b8b"] Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.699986 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"ca36d57565215d655b6610f6e02a16ab3caa4b5556e3371ff50abe84d4d8eff1"} Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.700108 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"feded736465c7c4aea3b5de955e165065213bbf82fe773e184881645c7f3d3c3"} Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.789713 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-6c9bf8484c-xcvbs"] Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.844238 4906 scope.go:117] "RemoveContainer" containerID="ac3501342ae7773461217efa9a338fc2231373b04af029dda0a99439df4a2c51" Feb 27 08:51:27 crc kubenswrapper[4906]: I0227 08:51:27.855826 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-688678b65d-rntkp"] Feb 27 08:51:28 crc kubenswrapper[4906]: I0227 08:51:28.100196 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5dfbf7b87b-s7l5j"] Feb 27 08:51:28 crc kubenswrapper[4906]: W0227 08:51:28.182078 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf95124f9_c5e8_4445_9885_03cc47172f2f.slice/crio-a01defaf6cfae3bb571fde4febdf38279525fc752f3ed30bf733554ae7176182 WatchSource:0}: Error finding container a01defaf6cfae3bb571fde4febdf38279525fc752f3ed30bf733554ae7176182: Status 404 returned error can't find the container with id a01defaf6cfae3bb571fde4febdf38279525fc752f3ed30bf733554ae7176182 Feb 27 08:51:28 crc kubenswrapper[4906]: I0227 08:51:28.242523 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d649d8c65-kdhln"] Feb 27 08:51:28 crc kubenswrapper[4906]: W0227 08:51:28.310446 4906 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76948b3e_a832_499b_a662_49e3669030d4.slice/crio-fdc18a44712faf1d3f5ace22e553d741df0944e04a0c5d8019bdcf966a64e3e7 WatchSource:0}: Error finding container fdc18a44712faf1d3f5ace22e553d741df0944e04a0c5d8019bdcf966a64e3e7: Status 404 returned error can't find the container with id fdc18a44712faf1d3f5ace22e553d741df0944e04a0c5d8019bdcf966a64e3e7 Feb 27 08:51:28 crc kubenswrapper[4906]: I0227 08:51:28.565898 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afee5cf7-dd4d-490f-be62-3fbea3170858" path="/var/lib/kubelet/pods/afee5cf7-dd4d-490f-be62-3fbea3170858/volumes" Feb 27 08:51:28 crc kubenswrapper[4906]: I0227 08:51:28.714343 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9bf8484c-xcvbs" event={"ID":"3afec785-161c-4ca3-bc22-0c958826c2db","Type":"ContainerStarted","Data":"07db3b807d3309fab87d0ad4914e2d9f46f4f2510d8afb3f8a66ce3085bc71cd"} Feb 27 08:51:28 crc kubenswrapper[4906]: I0227 08:51:28.715731 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" event={"ID":"76948b3e-a832-499b-a662-49e3669030d4","Type":"ContainerStarted","Data":"fdc18a44712faf1d3f5ace22e553d741df0944e04a0c5d8019bdcf966a64e3e7"} Feb 27 08:51:28 crc kubenswrapper[4906]: I0227 08:51:28.720429 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" event={"ID":"f95124f9-c5e8-4445-9885-03cc47172f2f","Type":"ContainerStarted","Data":"a01defaf6cfae3bb571fde4febdf38279525fc752f3ed30bf733554ae7176182"} Feb 27 08:51:28 crc kubenswrapper[4906]: I0227 08:51:28.721809 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-688678b65d-rntkp" event={"ID":"8ba40093-618b-4802-807d-91b1686f98c6","Type":"ContainerStarted","Data":"c494807fbac1ab15df33fcf5ef0046ac4e94c64ba05f67edae51b3495c3b8f8d"} Feb 27 08:51:29 crc kubenswrapper[4906]: I0227 08:51:29.737186 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" event={"ID":"f95124f9-c5e8-4445-9885-03cc47172f2f","Type":"ContainerStarted","Data":"21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932"} Feb 27 08:51:29 crc kubenswrapper[4906]: I0227 08:51:29.737755 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" event={"ID":"f95124f9-c5e8-4445-9885-03cc47172f2f","Type":"ContainerStarted","Data":"eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f"} Feb 27 08:51:29 crc kubenswrapper[4906]: I0227 08:51:29.738002 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:29 crc kubenswrapper[4906]: I0227 08:51:29.738054 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:29 crc kubenswrapper[4906]: I0227 08:51:29.747028 4906 generic.go:334] "Generic (PLEG): container finished" podID="76948b3e-a832-499b-a662-49e3669030d4" containerID="d7ba8617395d73ef7cba7b69b8ac7e449894227ab090f4a6fcc75ab236f25328" exitCode=0 Feb 27 08:51:29 crc kubenswrapper[4906]: I0227 08:51:29.747088 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" event={"ID":"76948b3e-a832-499b-a662-49e3669030d4","Type":"ContainerDied","Data":"d7ba8617395d73ef7cba7b69b8ac7e449894227ab090f4a6fcc75ab236f25328"} Feb 27 08:51:29 crc kubenswrapper[4906]: I0227 
08:51:29.775473 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" podStartSLOduration=3.775446617 podStartE2EDuration="3.775446617s" podCreationTimestamp="2026-02-27 08:51:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:29.763012751 +0000 UTC m=+1388.157414371" watchObservedRunningTime="2026-02-27 08:51:29.775446617 +0000 UTC m=+1388.169848227" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.318179 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-79595755d6-s8v98"] Feb 27 08:51:30 crc kubenswrapper[4906]: E0227 08:51:30.319581 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerName="dnsmasq-dns" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.319604 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerName="dnsmasq-dns" Feb 27 08:51:30 crc kubenswrapper[4906]: E0227 08:51:30.319662 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerName="init" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.319673 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerName="init" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.320092 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="afee5cf7-dd4d-490f-be62-3fbea3170858" containerName="dnsmasq-dns" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.322994 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.328870 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.329165 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.333198 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-79595755d6-s8v98"] Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.500832 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-config-data\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.500934 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-config-data-custom\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.500976 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-internal-tls-certs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " 
pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.501264 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-public-tls-certs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.501469 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66gr8\" (UniqueName: \"kubernetes.io/projected/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-kube-api-access-66gr8\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.501617 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-logs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.501646 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-combined-ca-bundle\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.606487 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-logs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.606542 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-combined-ca-bundle\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.606570 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-config-data\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.606597 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-config-data-custom\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.606633 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-internal-tls-certs\") pod \"barbican-api-79595755d6-s8v98\" (UID: 
\"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.606793 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-public-tls-certs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.606952 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66gr8\" (UniqueName: \"kubernetes.io/projected/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-kube-api-access-66gr8\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.607671 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-logs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.626701 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-combined-ca-bundle\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.630789 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-public-tls-certs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.631815 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-config-data-custom\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.639569 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66gr8\" (UniqueName: \"kubernetes.io/projected/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-kube-api-access-66gr8\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.643369 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-config-data\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.643926 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d0ff3fbe-cb4c-4337-96a2-b2b621691c91-internal-tls-certs\") pod \"barbican-api-79595755d6-s8v98\" (UID: \"d0ff3fbe-cb4c-4337-96a2-b2b621691c91\") " 
pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.796851 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-6d6854c956-6hqvk" Feb 27 08:51:30 crc kubenswrapper[4906]: I0227 08:51:30.945482 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:31 crc kubenswrapper[4906]: I0227 08:51:31.366227 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:31 crc kubenswrapper[4906]: I0227 08:51:31.776784 4906 generic.go:334] "Generic (PLEG): container finished" podID="76d04662-7576-4f57-aca2-e118e5efd771" containerID="7afff095932d9429eea67d34bf6f9ac2503e2c1396391f4b0d1fa256b46e910f" exitCode=0 Feb 27 08:51:31 crc kubenswrapper[4906]: I0227 08:51:31.776908 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pg9nd" event={"ID":"76d04662-7576-4f57-aca2-e118e5efd771","Type":"ContainerDied","Data":"7afff095932d9429eea67d34bf6f9ac2503e2c1396391f4b0d1fa256b46e910f"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.026741 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-79595755d6-s8v98"] Feb 27 08:51:32 crc kubenswrapper[4906]: W0227 08:51:32.053085 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd0ff3fbe_cb4c_4337_96a2_b2b621691c91.slice/crio-de86f26cd89e804590e72a49cf862e317a00dfb96b83c5eff1790c3be9778045 WatchSource:0}: Error finding container de86f26cd89e804590e72a49cf862e317a00dfb96b83c5eff1790c3be9778045: Status 404 returned error can't find the container with id de86f26cd89e804590e72a49cf862e317a00dfb96b83c5eff1790c3be9778045 Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.202612 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.314042 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7f78987f9b-lzmw8" podUID="6dc6534b-d5ec-4c53-bfc1-aae2389e3755" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.855011 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-688678b65d-rntkp" event={"ID":"8ba40093-618b-4802-807d-91b1686f98c6","Type":"ContainerStarted","Data":"8f125db8ef9aa846eadcacd21e5affe344df0a16d1fb420f639090a31cbac8e4"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.855560 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-688678b65d-rntkp" event={"ID":"8ba40093-618b-4802-807d-91b1686f98c6","Type":"ContainerStarted","Data":"15e31f3be32d7c948ea461e1fd624e826740d8c893eeee496b29a7eb548f14b1"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.875250 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9bf8484c-xcvbs" 
event={"ID":"3afec785-161c-4ca3-bc22-0c958826c2db","Type":"ContainerStarted","Data":"9767faaf44c7b00802b5f009b18b2327146f1b74eccd1ef209e3a0023b015064"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.875305 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-6c9bf8484c-xcvbs" event={"ID":"3afec785-161c-4ca3-bc22-0c958826c2db","Type":"ContainerStarted","Data":"6adef05aa1754827e6946c1717cca2d80b644a773f66418a1ff255052c821d31"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.881232 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-688678b65d-rntkp" podStartSLOduration=3.497541267 podStartE2EDuration="6.881205222s" podCreationTimestamp="2026-02-27 08:51:26 +0000 UTC" firstStartedPulling="2026-02-27 08:51:28.061984181 +0000 UTC m=+1386.456385791" lastFinishedPulling="2026-02-27 08:51:31.445648136 +0000 UTC m=+1389.840049746" observedRunningTime="2026-02-27 08:51:32.880556595 +0000 UTC m=+1391.274958215" watchObservedRunningTime="2026-02-27 08:51:32.881205222 +0000 UTC m=+1391.275606832" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.896819 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" event={"ID":"76948b3e-a832-499b-a662-49e3669030d4","Type":"ContainerStarted","Data":"afdf458f72cd5d8887b3ddfbf0c772d7c4aac0a852089a8d8869f0af3e31a7c6"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.897037 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.901790 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79595755d6-s8v98" event={"ID":"d0ff3fbe-cb4c-4337-96a2-b2b621691c91","Type":"ContainerStarted","Data":"7e63508f1246ac4dd4f107e12a9435ecd0a122cf34b0bf604219aa0069a86ba5"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.901866 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79595755d6-s8v98" event={"ID":"d0ff3fbe-cb4c-4337-96a2-b2b621691c91","Type":"ContainerStarted","Data":"157bc122aa1399b69a53dc743be523235e059ce294ae1354665a930d9fbd9e97"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.902050 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-79595755d6-s8v98" event={"ID":"d0ff3fbe-cb4c-4337-96a2-b2b621691c91","Type":"ContainerStarted","Data":"de86f26cd89e804590e72a49cf862e317a00dfb96b83c5eff1790c3be9778045"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.904254 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.904287 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.945145 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"c5807f38896c90f812fa1bc72eac72ee9e6826c71a003148c963afe226e43957"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.945309 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"d18239df68284f445347998ce5de7b8beeea2352573b99d2c3c8bd9435e549cf"} Feb 27 08:51:32 crc kubenswrapper[4906]: 
I0227 08:51:32.945337 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"8fde7f9341b60b9bad4a429694dcdfaf23859ee7b50662b1136407b879962524"} Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.964448 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" podStartSLOduration=6.964420285 podStartE2EDuration="6.964420285s" podCreationTimestamp="2026-02-27 08:51:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:32.937867418 +0000 UTC m=+1391.332269028" watchObservedRunningTime="2026-02-27 08:51:32.964420285 +0000 UTC m=+1391.358821895" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.973375 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-6c9bf8484c-xcvbs" podStartSLOduration=3.577848674 podStartE2EDuration="6.973337919s" podCreationTimestamp="2026-02-27 08:51:26 +0000 UTC" firstStartedPulling="2026-02-27 08:51:28.026054059 +0000 UTC m=+1386.420455669" lastFinishedPulling="2026-02-27 08:51:31.421543304 +0000 UTC m=+1389.815944914" observedRunningTime="2026-02-27 08:51:32.915703027 +0000 UTC m=+1391.310104637" watchObservedRunningTime="2026-02-27 08:51:32.973337919 +0000 UTC m=+1391.367739519" Feb 27 08:51:32 crc kubenswrapper[4906]: I0227 08:51:32.975434 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-79595755d6-s8v98" podStartSLOduration=2.975420263 podStartE2EDuration="2.975420263s" podCreationTimestamp="2026-02-27 08:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:32.973166194 +0000 UTC m=+1391.367567804" watchObservedRunningTime="2026-02-27 08:51:32.975420263 +0000 UTC m=+1391.369821873" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.428381 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.432377 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Feb 27 08:51:33 crc kubenswrapper[4906]: E0227 08:51:33.433052 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76d04662-7576-4f57-aca2-e118e5efd771" containerName="cinder-db-sync" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.433082 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="76d04662-7576-4f57-aca2-e118e5efd771" containerName="cinder-db-sync" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.433358 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="76d04662-7576-4f57-aca2-e118e5efd771" containerName="cinder-db-sync" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.434360 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.439719 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-q62th" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.440030 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.440212 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.454092 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554014 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-scripts\") pod \"76d04662-7576-4f57-aca2-e118e5efd771\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554086 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-db-sync-config-data\") pod \"76d04662-7576-4f57-aca2-e118e5efd771\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554125 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-combined-ca-bundle\") pod \"76d04662-7576-4f57-aca2-e118e5efd771\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554186 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-config-data\") pod \"76d04662-7576-4f57-aca2-e118e5efd771\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554310 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/76d04662-7576-4f57-aca2-e118e5efd771-etc-machine-id\") pod \"76d04662-7576-4f57-aca2-e118e5efd771\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554365 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lf7n6\" (UniqueName: \"kubernetes.io/projected/76d04662-7576-4f57-aca2-e118e5efd771-kube-api-access-lf7n6\") pod \"76d04662-7576-4f57-aca2-e118e5efd771\" (UID: \"76d04662-7576-4f57-aca2-e118e5efd771\") " Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554785 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e4bf0f4a-8f16-4255-8d40-37826771ba47-openstack-config-secret\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554906 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76d04662-7576-4f57-aca2-e118e5efd771-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "76d04662-7576-4f57-aca2-e118e5efd771" 
(UID: "76d04662-7576-4f57-aca2-e118e5efd771"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.554968 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e4bf0f4a-8f16-4255-8d40-37826771ba47-openstack-config\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.555009 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvff4\" (UniqueName: \"kubernetes.io/projected/e4bf0f4a-8f16-4255-8d40-37826771ba47-kube-api-access-vvff4\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.555223 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4bf0f4a-8f16-4255-8d40-37826771ba47-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.555790 4906 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/76d04662-7576-4f57-aca2-e118e5efd771-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.571785 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "76d04662-7576-4f57-aca2-e118e5efd771" (UID: "76d04662-7576-4f57-aca2-e118e5efd771"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.572127 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76d04662-7576-4f57-aca2-e118e5efd771-kube-api-access-lf7n6" (OuterVolumeSpecName: "kube-api-access-lf7n6") pod "76d04662-7576-4f57-aca2-e118e5efd771" (UID: "76d04662-7576-4f57-aca2-e118e5efd771"). InnerVolumeSpecName "kube-api-access-lf7n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.572443 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-scripts" (OuterVolumeSpecName: "scripts") pod "76d04662-7576-4f57-aca2-e118e5efd771" (UID: "76d04662-7576-4f57-aca2-e118e5efd771"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.601083 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "76d04662-7576-4f57-aca2-e118e5efd771" (UID: "76d04662-7576-4f57-aca2-e118e5efd771"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.624082 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-796c549d8f-qpw49" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.642016 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-config-data" (OuterVolumeSpecName: "config-data") pod "76d04662-7576-4f57-aca2-e118e5efd771" (UID: "76d04662-7576-4f57-aca2-e118e5efd771"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661291 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e4bf0f4a-8f16-4255-8d40-37826771ba47-openstack-config\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661332 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvff4\" (UniqueName: \"kubernetes.io/projected/e4bf0f4a-8f16-4255-8d40-37826771ba47-kube-api-access-vvff4\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661380 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4bf0f4a-8f16-4255-8d40-37826771ba47-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661423 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e4bf0f4a-8f16-4255-8d40-37826771ba47-openstack-config-secret\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661489 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661501 4906 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661515 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661524 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/76d04662-7576-4f57-aca2-e118e5efd771-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.661534 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lf7n6\" (UniqueName: \"kubernetes.io/projected/76d04662-7576-4f57-aca2-e118e5efd771-kube-api-access-lf7n6\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.668471 
4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/e4bf0f4a-8f16-4255-8d40-37826771ba47-openstack-config\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.673726 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/e4bf0f4a-8f16-4255-8d40-37826771ba47-openstack-config-secret\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.682051 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.685057 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.703195 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4bf0f4a-8f16-4255-8d40-37826771ba47-combined-ca-bundle\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.745034 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvff4\" (UniqueName: \"kubernetes.io/projected/e4bf0f4a-8f16-4255-8d40-37826771ba47-kube-api-access-vvff4\") pod \"openstackclient\" (UID: \"e4bf0f4a-8f16-4255-8d40-37826771ba47\") " pod="openstack/openstackclient" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.757530 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5f95d46d88-tlzqg"] Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.757851 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5f95d46d88-tlzqg" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-api" containerID="cri-o://d9811e0f5aaa9de6a809c26ad72bccf19cb90003607d3ab9efb4105c167a5309" gracePeriod=30 Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.758523 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-5f95d46d88-tlzqg" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-httpd" containerID="cri-o://86dd6449fddc39957e004f7b6b397c4d4621611eda44f64973038e9868bc3fb9" gracePeriod=30 Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.783184 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:33 crc kubenswrapper[4906]: I0227 08:51:33.793826 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:33.991140 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pg9nd" event={"ID":"76d04662-7576-4f57-aca2-e118e5efd771","Type":"ContainerDied","Data":"39c72e0b511ebd5e160b28d4b5c22a3502e4251faa85d0a1b17c0494a32a9786"} Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:33.991197 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="39c72e0b511ebd5e160b28d4b5c22a3502e4251faa85d0a1b17c0494a32a9786" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:33.991264 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pg9nd" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.063895 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"ccd6552ddb3fa4660f6a1a80e28cd557d5e2b02b6c9c724a9f10570771a6e048"} Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.063940 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.063952 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"41429de7cfd908672f02537698214d7d6e67af655c4d544b81f114ec36cbb9a1"} Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.489271 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.650947 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.653335 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.684434 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.696707 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.697065 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-wkdld" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.699491 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.699963 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.777044 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d649d8c65-kdhln"] Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.798370 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.798523 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fqxbc\" (UniqueName: \"kubernetes.io/projected/a0065e88-26a7-4491-ac2a-0c22c054b839-kube-api-access-fqxbc\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.798586 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.798651 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0065e88-26a7-4491-ac2a-0c22c054b839-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.798682 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-scripts\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.798762 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.829761 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57fff66767-fvdqw"] Feb 27 08:51:34 crc 
kubenswrapper[4906]: I0227 08:51:34.831810 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.848213 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57fff66767-fvdqw"] Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.900402 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xb8kw\" (UniqueName: \"kubernetes.io/projected/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-kube-api-access-xb8kw\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901217 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-nb\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901276 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901309 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-dns-svc\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901334 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901388 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fqxbc\" (UniqueName: \"kubernetes.io/projected/a0065e88-26a7-4491-ac2a-0c22c054b839-kube-api-access-fqxbc\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901423 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901450 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-sb\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901484 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0065e88-26a7-4491-ac2a-0c22c054b839-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901505 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-scripts\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.901532 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-config\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.905843 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0065e88-26a7-4491-ac2a-0c22c054b839-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.927000 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-scripts\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.927095 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.939355 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.951930 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fqxbc\" (UniqueName: \"kubernetes.io/projected/a0065e88-26a7-4491-ac2a-0c22c054b839-kube-api-access-fqxbc\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.952489 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:34 crc kubenswrapper[4906]: I0227 08:51:34.998369 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.000932 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.005325 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-sb\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.005435 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-config\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.005472 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xb8kw\" (UniqueName: \"kubernetes.io/projected/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-kube-api-access-xb8kw\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.005496 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-nb\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.005537 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-dns-svc\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.005859 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.009487 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.026620 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.031251 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-sb\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.031245 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-nb\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.032154 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-dns-svc\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.046504 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-config\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.115659 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.115723 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data-custom\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.115985 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6717d894-ee45-4de0-9c94-5778bf5d9884-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.116016 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.116071 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2pxg5\" (UniqueName: \"kubernetes.io/projected/6717d894-ee45-4de0-9c94-5778bf5d9884-kube-api-access-2pxg5\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.116118 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6717d894-ee45-4de0-9c94-5778bf5d9884-logs\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.116201 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-scripts\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.143079 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xb8kw\" (UniqueName: \"kubernetes.io/projected/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-kube-api-access-xb8kw\") pod \"dnsmasq-dns-57fff66767-fvdqw\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.186151 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.219981 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.235293 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data-custom\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.235774 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.235858 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6717d894-ee45-4de0-9c94-5778bf5d9884-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.235986 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2pxg5\" (UniqueName: \"kubernetes.io/projected/6717d894-ee45-4de0-9c94-5778bf5d9884-kube-api-access-2pxg5\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.236098 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6717d894-ee45-4de0-9c94-5778bf5d9884-logs\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.236280 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-scripts\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.237404 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6717d894-ee45-4de0-9c94-5778bf5d9884-etc-machine-id\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.241210 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6717d894-ee45-4de0-9c94-5778bf5d9884-logs\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.250781 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.251375 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"05f1de0620249acf2d744465fde16eacb78436b90665bdb813c51acd90bda0e8"} Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.254974 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-scripts\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.264247 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data-custom\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.287285 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.309057 4906 generic.go:334] "Generic (PLEG): container finished" podID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerID="86dd6449fddc39957e004f7b6b397c4d4621611eda44f64973038e9868bc3fb9" exitCode=0 Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.309451 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f95d46d88-tlzqg" event={"ID":"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4","Type":"ContainerDied","Data":"86dd6449fddc39957e004f7b6b397c4d4621611eda44f64973038e9868bc3fb9"} Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.312081 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.312401 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" podUID="76948b3e-a832-499b-a662-49e3669030d4" containerName="dnsmasq-dns" 
containerID="cri-o://afdf458f72cd5d8887b3ddfbf0c772d7c4aac0a852089a8d8869f0af3e31a7c6" gracePeriod=10 Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.325721 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2pxg5\" (UniqueName: \"kubernetes.io/projected/6717d894-ee45-4de0-9c94-5778bf5d9884-kube-api-access-2pxg5\") pod \"cinder-api-0\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.523555 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.542483 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 27 08:51:35 crc kubenswrapper[4906]: I0227 08:51:35.954257 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57fff66767-fvdqw"] Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.037142 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.286416 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-59d7bdb8d4-m9nsb" Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.351840 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e4bf0f4a-8f16-4255-8d40-37826771ba47","Type":"ContainerStarted","Data":"c1fa463d3e140761e6b2d52b2f53fa916d1e08c99bd34ea052151a34fd54d4b4"} Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.377172 4906 generic.go:334] "Generic (PLEG): container finished" podID="76948b3e-a832-499b-a662-49e3669030d4" containerID="afdf458f72cd5d8887b3ddfbf0c772d7c4aac0a852089a8d8869f0af3e31a7c6" exitCode=0 Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.377243 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" event={"ID":"76948b3e-a832-499b-a662-49e3669030d4","Type":"ContainerDied","Data":"afdf458f72cd5d8887b3ddfbf0c772d7c4aac0a852089a8d8869f0af3e31a7c6"} Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.418711 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:51:36 crc kubenswrapper[4906]: W0227 08:51:36.461388 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6717d894_ee45_4de0_9c94_5778bf5d9884.slice/crio-b9c1978c70115178be2895edd6f3f1877206ac33e78045196d44981001a553fa WatchSource:0}: Error finding container b9c1978c70115178be2895edd6f3f1877206ac33e78045196d44981001a553fa: Status 404 returned error can't find the container with id b9c1978c70115178be2895edd6f3f1877206ac33e78045196d44981001a553fa Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.469610 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"c98486bd-1325-4072-bce0-a28d38ecead2","Type":"ContainerStarted","Data":"2e51acdfdafe988c4be920b7aca5e13b73188e025f9c7565c26b88baf0b76d9e"} Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.478078 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a0065e88-26a7-4491-ac2a-0c22c054b839","Type":"ContainerStarted","Data":"733b7069e621159c5169a359ebe0e431603707176696388c81967e1f89134e69"} Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.484603 4906 prober_manager.go:312] "Failed to 
trigger a manual run" probe="Readiness" Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.486224 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" event={"ID":"8b3f1a22-8f41-40cc-8ae3-27241a6d507f","Type":"ContainerStarted","Data":"db0132dfd4cf111b76d6142ed981eed553e1662b0dfbe81668c87bc6b9339575"} Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.635598 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=135.845655607 podStartE2EDuration="2m27.635578971s" podCreationTimestamp="2026-02-27 08:49:09 +0000 UTC" firstStartedPulling="2026-02-27 08:51:19.631727663 +0000 UTC m=+1378.026129273" lastFinishedPulling="2026-02-27 08:51:31.421651027 +0000 UTC m=+1389.816052637" observedRunningTime="2026-02-27 08:51:36.538561666 +0000 UTC m=+1394.932963276" watchObservedRunningTime="2026-02-27 08:51:36.635578971 +0000 UTC m=+1395.029980581" Feb 27 08:51:36 crc kubenswrapper[4906]: I0227 08:51:36.956298 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57fff66767-fvdqw"] Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.004800 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-65bc8f75b9-q99p2"] Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.007547 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.011233 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.023920 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65bc8f75b9-q99p2"] Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.137905 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-svc\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.137972 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-swift-storage-0\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.138000 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-sb\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.138034 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwnhd\" (UniqueName: \"kubernetes.io/projected/34296caa-6147-4848-a0c6-2b5be70028d1-kube-api-access-fwnhd\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.138065 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-config\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.138088 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-nb\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.245594 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-swift-storage-0\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.245671 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-sb\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.245723 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwnhd\" (UniqueName: \"kubernetes.io/projected/34296caa-6147-4848-a0c6-2b5be70028d1-kube-api-access-fwnhd\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.245776 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-config\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.245815 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-nb\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.246031 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-svc\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.247597 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-svc\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.248428 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-config\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.248726 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-swift-storage-0\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.249170 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-nb\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.249765 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-sb\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.276079 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwnhd\" (UniqueName: \"kubernetes.io/projected/34296caa-6147-4848-a0c6-2b5be70028d1-kube-api-access-fwnhd\") pod \"dnsmasq-dns-65bc8f75b9-q99p2\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.373668 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.405050 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.551277 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.551604 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zjg2g\" (UniqueName: \"kubernetes.io/projected/76948b3e-a832-499b-a662-49e3669030d4-kube-api-access-zjg2g\") pod \"76948b3e-a832-499b-a662-49e3669030d4\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.551735 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-nb\") pod \"76948b3e-a832-499b-a662-49e3669030d4\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.551950 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-config\") pod \"76948b3e-a832-499b-a662-49e3669030d4\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.552071 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-dns-svc\") pod \"76948b3e-a832-499b-a662-49e3669030d4\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.552117 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-sb\") pod \"76948b3e-a832-499b-a662-49e3669030d4\" (UID: \"76948b3e-a832-499b-a662-49e3669030d4\") " Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.559758 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" event={"ID":"76948b3e-a832-499b-a662-49e3669030d4","Type":"ContainerDied","Data":"fdc18a44712faf1d3f5ace22e553d741df0944e04a0c5d8019bdcf966a64e3e7"} Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.560144 4906 scope.go:117] "RemoveContainer" containerID="afdf458f72cd5d8887b3ddfbf0c772d7c4aac0a852089a8d8869f0af3e31a7c6" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.560353 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d649d8c65-kdhln" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.589709 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76948b3e-a832-499b-a662-49e3669030d4-kube-api-access-zjg2g" (OuterVolumeSpecName: "kube-api-access-zjg2g") pod "76948b3e-a832-499b-a662-49e3669030d4" (UID: "76948b3e-a832-499b-a662-49e3669030d4"). InnerVolumeSpecName "kube-api-access-zjg2g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.642548 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6717d894-ee45-4de0-9c94-5778bf5d9884","Type":"ContainerStarted","Data":"b9c1978c70115178be2895edd6f3f1877206ac33e78045196d44981001a553fa"} Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.660779 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zjg2g\" (UniqueName: \"kubernetes.io/projected/76948b3e-a832-499b-a662-49e3669030d4-kube-api-access-zjg2g\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.670578 4906 generic.go:334] "Generic (PLEG): container finished" podID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerID="0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2" exitCode=0 Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.671153 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" podUID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerName="dnsmasq-dns" containerID="cri-o://49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e" gracePeriod=10 Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.671845 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" event={"ID":"8b3f1a22-8f41-40cc-8ae3-27241a6d507f","Type":"ContainerDied","Data":"0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2"} Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.671921 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" event={"ID":"8b3f1a22-8f41-40cc-8ae3-27241a6d507f","Type":"ContainerStarted","Data":"49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e"} Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.672310 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.691227 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76948b3e-a832-499b-a662-49e3669030d4" (UID: "76948b3e-a832-499b-a662-49e3669030d4"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.713725 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" podStartSLOduration=3.713704761 podStartE2EDuration="3.713704761s" podCreationTimestamp="2026-02-27 08:51:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:37.703585155 +0000 UTC m=+1396.097986765" watchObservedRunningTime="2026-02-27 08:51:37.713704761 +0000 UTC m=+1396.108106371" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.724005 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76948b3e-a832-499b-a662-49e3669030d4" (UID: "76948b3e-a832-499b-a662-49e3669030d4"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.741663 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-config" (OuterVolumeSpecName: "config") pod "76948b3e-a832-499b-a662-49e3669030d4" (UID: "76948b3e-a832-499b-a662-49e3669030d4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.756589 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76948b3e-a832-499b-a662-49e3669030d4" (UID: "76948b3e-a832-499b-a662-49e3669030d4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.771264 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.772020 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.772141 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.772327 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76948b3e-a832-499b-a662-49e3669030d4-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.839184 4906 scope.go:117] "RemoveContainer" containerID="d7ba8617395d73ef7cba7b69b8ac7e449894227ab090f4a6fcc75ab236f25328" Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.934309 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d649d8c65-kdhln"] Feb 27 08:51:37 crc kubenswrapper[4906]: I0227 08:51:37.966200 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d649d8c65-kdhln"] Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.067809 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-65bc8f75b9-q99p2"] Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.505611 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.593241 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76948b3e-a832-499b-a662-49e3669030d4" path="/var/lib/kubelet/pods/76948b3e-a832-499b-a662-49e3669030d4/volumes" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.615127 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-nb\") pod \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.615302 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-config\") pod \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.616105 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-sb\") pod \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.616191 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-dns-svc\") pod \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.616222 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xb8kw\" (UniqueName: \"kubernetes.io/projected/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-kube-api-access-xb8kw\") pod \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\" (UID: \"8b3f1a22-8f41-40cc-8ae3-27241a6d507f\") " Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.631483 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-kube-api-access-xb8kw" (OuterVolumeSpecName: "kube-api-access-xb8kw") pod "8b3f1a22-8f41-40cc-8ae3-27241a6d507f" (UID: "8b3f1a22-8f41-40cc-8ae3-27241a6d507f"). InnerVolumeSpecName "kube-api-access-xb8kw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.697230 4906 generic.go:334] "Generic (PLEG): container finished" podID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerID="49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e" exitCode=0 Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.697431 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" event={"ID":"8b3f1a22-8f41-40cc-8ae3-27241a6d507f","Type":"ContainerDied","Data":"49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e"} Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.697964 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" event={"ID":"8b3f1a22-8f41-40cc-8ae3-27241a6d507f","Type":"ContainerDied","Data":"db0132dfd4cf111b76d6142ed981eed553e1662b0dfbe81668c87bc6b9339575"} Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.697570 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57fff66767-fvdqw" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.698009 4906 scope.go:117] "RemoveContainer" containerID="49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.704641 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-config" (OuterVolumeSpecName: "config") pod "8b3f1a22-8f41-40cc-8ae3-27241a6d507f" (UID: "8b3f1a22-8f41-40cc-8ae3-27241a6d507f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.715735 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" event={"ID":"34296caa-6147-4848-a0c6-2b5be70028d1","Type":"ContainerStarted","Data":"a7211580abb6aae0dd574ff634bcff55f0a983f9b680266c274c60007b7d18f5"} Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.715800 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" event={"ID":"34296caa-6147-4848-a0c6-2b5be70028d1","Type":"ContainerStarted","Data":"797666ea950012bc2a0556a55ebf7e99c7864e5ed8536711370e68b1c3afd33b"} Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.720428 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8b3f1a22-8f41-40cc-8ae3-27241a6d507f" (UID: "8b3f1a22-8f41-40cc-8ae3-27241a6d507f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.726994 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.727050 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.727065 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xb8kw\" (UniqueName: \"kubernetes.io/projected/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-kube-api-access-xb8kw\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.735501 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8b3f1a22-8f41-40cc-8ae3-27241a6d507f" (UID: "8b3f1a22-8f41-40cc-8ae3-27241a6d507f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.736686 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8b3f1a22-8f41-40cc-8ae3-27241a6d507f" (UID: "8b3f1a22-8f41-40cc-8ae3-27241a6d507f"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.738155 4906 generic.go:334] "Generic (PLEG): container finished" podID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerID="d9811e0f5aaa9de6a809c26ad72bccf19cb90003607d3ab9efb4105c167a5309" exitCode=0 Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.738282 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f95d46d88-tlzqg" event={"ID":"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4","Type":"ContainerDied","Data":"d9811e0f5aaa9de6a809c26ad72bccf19cb90003607d3ab9efb4105c167a5309"} Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.747173 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6717d894-ee45-4de0-9c94-5778bf5d9884","Type":"ContainerStarted","Data":"fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66"} Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.772200 4906 scope.go:117] "RemoveContainer" containerID="0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.831305 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.831343 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8b3f1a22-8f41-40cc-8ae3-27241a6d507f-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.891918 4906 scope.go:117] "RemoveContainer" containerID="49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e" Feb 27 08:51:38 crc kubenswrapper[4906]: E0227 08:51:38.898794 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e\": container with ID starting with 49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e not found: ID does not exist" containerID="49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.899104 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e"} err="failed to get container status \"49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e\": rpc error: code = NotFound desc = could not find container \"49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e\": container with ID starting with 49ed16bdde46b971cdc3c366293d69fb0a236f3bd3ebab8e6bb97bbf25d9963e not found: ID does not exist" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 08:51:38.899211 4906 scope.go:117] "RemoveContainer" containerID="0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2" Feb 27 08:51:38 crc kubenswrapper[4906]: E0227 08:51:38.909354 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2\": container with ID starting with 0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2 not found: ID does not exist" containerID="0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2" Feb 27 08:51:38 crc kubenswrapper[4906]: I0227 
08:51:38.909420 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2"} err="failed to get container status \"0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2\": rpc error: code = NotFound desc = could not find container \"0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2\": container with ID starting with 0c9e37be5a6f236f044f4cbc80b2c4173c65c82d6e44ced55d64da2746e195c2 not found: ID does not exist" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.149066 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57fff66767-fvdqw"] Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.204022 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57fff66767-fvdqw"] Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.442789 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.462042 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.462219 4906 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.578641 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-ovndb-tls-certs\") pod \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.578715 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-combined-ca-bundle\") pod \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.578802 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-httpd-config\") pod \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.578841 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqwlp\" (UniqueName: \"kubernetes.io/projected/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-kube-api-access-kqwlp\") pod \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.579063 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-config\") pod \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\" (UID: \"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4\") " Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.604156 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" (UID: "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.610600 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-kube-api-access-kqwlp" (OuterVolumeSpecName: "kube-api-access-kqwlp") pod "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" (UID: "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4"). InnerVolumeSpecName "kube-api-access-kqwlp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.684691 4906 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-httpd-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.684744 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqwlp\" (UniqueName: \"kubernetes.io/projected/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-kube-api-access-kqwlp\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.694092 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" (UID: "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.735319 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-config" (OuterVolumeSpecName: "config") pod "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" (UID: "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.786305 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" (UID: "3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.787746 4906 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.787768 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.787778 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.793254 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a0065e88-26a7-4491-ac2a-0c22c054b839","Type":"ContainerStarted","Data":"ba3762128c2d8cc88294ce87f0aef06ff265adabbd433454b72446ccb1492f56"} Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.831321 4906 generic.go:334] "Generic (PLEG): container finished" podID="34296caa-6147-4848-a0c6-2b5be70028d1" containerID="a7211580abb6aae0dd574ff634bcff55f0a983f9b680266c274c60007b7d18f5" exitCode=0 Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.831466 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" event={"ID":"34296caa-6147-4848-a0c6-2b5be70028d1","Type":"ContainerDied","Data":"a7211580abb6aae0dd574ff634bcff55f0a983f9b680266c274c60007b7d18f5"} Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.831512 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" event={"ID":"34296caa-6147-4848-a0c6-2b5be70028d1","Type":"ContainerStarted","Data":"9bd7487fb2a194f3772768b849738b9c4a53b98d5e42f771a716d3d6241a494a"} Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.833063 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.873905 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" podStartSLOduration=3.873867012 podStartE2EDuration="3.873867012s" podCreationTimestamp="2026-02-27 08:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:39.871400648 +0000 UTC m=+1398.265802258" watchObservedRunningTime="2026-02-27 08:51:39.873867012 +0000 UTC m=+1398.268268622" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.876503 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5f95d46d88-tlzqg" event={"ID":"3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4","Type":"ContainerDied","Data":"d890e754f86bef0288dc98e4fe5510ca1ecb2f6843657fc7b00599d15baeba68"} Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.876571 4906 scope.go:117] "RemoveContainer" containerID="86dd6449fddc39957e004f7b6b397c4d4621611eda44f64973038e9868bc3fb9" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.876700 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5f95d46d88-tlzqg" Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.928514 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-5f95d46d88-tlzqg"] Feb 27 08:51:39 crc kubenswrapper[4906]: I0227 08:51:39.936911 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-5f95d46d88-tlzqg"] Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.001514 4906 scope.go:117] "RemoveContainer" containerID="d9811e0f5aaa9de6a809c26ad72bccf19cb90003607d3ab9efb4105c167a5309" Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.504593 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.565346 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" path="/var/lib/kubelet/pods/3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4/volumes" Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.566457 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" path="/var/lib/kubelet/pods/8b3f1a22-8f41-40cc-8ae3-27241a6d507f/volumes" Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.763497 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.942336 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6717d894-ee45-4de0-9c94-5778bf5d9884","Type":"ContainerStarted","Data":"71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c"} Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.942599 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api-log" containerID="cri-o://fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66" gracePeriod=30 Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.943040 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.943413 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api" containerID="cri-o://71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c" gracePeriod=30 Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.971393 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a0065e88-26a7-4491-ac2a-0c22c054b839","Type":"ContainerStarted","Data":"eb23296070858ebf704087e3d6751e05296e7b8805f8dbc7ece16860fe6c4a6a"} Feb 27 08:51:40 crc kubenswrapper[4906]: I0227 08:51:40.992184 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.992153186 podStartE2EDuration="6.992153186s" podCreationTimestamp="2026-02-27 08:51:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:40.969614485 +0000 UTC m=+1399.364016105" watchObservedRunningTime="2026-02-27 08:51:40.992153186 +0000 UTC m=+1399.386554796" Feb 27 08:51:41 crc kubenswrapper[4906]: I0227 08:51:41.029628 4906 pod_startup_latency_tracker.go:104] "Observed 
pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.643531131 podStartE2EDuration="7.029594248s" podCreationTimestamp="2026-02-27 08:51:34 +0000 UTC" firstStartedPulling="2026-02-27 08:51:36.064947893 +0000 UTC m=+1394.459349503" lastFinishedPulling="2026-02-27 08:51:37.45101101 +0000 UTC m=+1395.845412620" observedRunningTime="2026-02-27 08:51:41.006222425 +0000 UTC m=+1399.400624035" watchObservedRunningTime="2026-02-27 08:51:41.029594248 +0000 UTC m=+1399.423995858" Feb 27 08:51:41 crc kubenswrapper[4906]: I0227 08:51:41.153938 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:41 crc kubenswrapper[4906]: I0227 08:51:41.998455 4906 generic.go:334] "Generic (PLEG): container finished" podID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerID="fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66" exitCode=143 Feb 27 08:51:41 crc kubenswrapper[4906]: I0227 08:51:41.998616 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6717d894-ee45-4de0-9c94-5778bf5d9884","Type":"ContainerDied","Data":"fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66"} Feb 27 08:51:42 crc kubenswrapper[4906]: I0227 08:51:42.199832 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Feb 27 08:51:42 crc kubenswrapper[4906]: I0227 08:51:42.309063 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7f78987f9b-lzmw8" podUID="6dc6534b-d5ec-4c53-bfc1-aae2389e3755" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.150:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.150:8443: connect: connection refused" Feb 27 08:51:43 crc kubenswrapper[4906]: I0227 08:51:43.276068 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:44 crc kubenswrapper[4906]: I0227 08:51:44.891632 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-79595755d6-s8v98" Feb 27 08:51:44 crc kubenswrapper[4906]: I0227 08:51:44.979988 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dfbf7b87b-s7l5j"] Feb 27 08:51:44 crc kubenswrapper[4906]: I0227 08:51:44.980278 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api-log" containerID="cri-o://eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f" gracePeriod=30 Feb 27 08:51:44 crc kubenswrapper[4906]: I0227 08:51:44.980928 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api" containerID="cri-o://21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932" gracePeriod=30 Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.029565 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.581190 4906 kubelet.go:2437] "SyncLoop DELETE" 
source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.582218 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-central-agent" containerID="cri-o://d3eee1edec5c66b8097262495ce3f3fd6e5efbf8bebc77452174e5b6cf36c899" gracePeriod=30 Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.582242 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-notification-agent" containerID="cri-o://daeb5e1e83b53f1af0f9d3ed986190ba46ac2bce85e21b6fa6429b1f1a0cfda1" gracePeriod=30 Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.582456 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="proxy-httpd" containerID="cri-o://2163e99c114663ba9c16529fefc749c69ad4a034148d9011a16364b105692df1" gracePeriod=30 Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.582526 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="sg-core" containerID="cri-o://dcbca9bbfa08ac23a876f9d1e338833dc5a2ee1265b1b915283015e7b3f0c54d" gracePeriod=30 Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.673696 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.690973 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.160:3000/\": read tcp 10.217.0.2:37202->10.217.0.160:3000: read: connection reset by peer" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.848962 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-8d74fdd59-xdxbd"] Feb 27 08:51:45 crc kubenswrapper[4906]: E0227 08:51:45.849609 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76948b3e-a832-499b-a662-49e3669030d4" containerName="dnsmasq-dns" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.849639 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="76948b3e-a832-499b-a662-49e3669030d4" containerName="dnsmasq-dns" Feb 27 08:51:45 crc kubenswrapper[4906]: E0227 08:51:45.849661 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerName="init" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.849668 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerName="init" Feb 27 08:51:45 crc kubenswrapper[4906]: E0227 08:51:45.849702 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-api" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.849710 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-api" Feb 27 08:51:45 crc kubenswrapper[4906]: E0227 08:51:45.849728 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerName="dnsmasq-dns" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 
08:51:45.849735 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerName="dnsmasq-dns" Feb 27 08:51:45 crc kubenswrapper[4906]: E0227 08:51:45.849742 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76948b3e-a832-499b-a662-49e3669030d4" containerName="init" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.849749 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="76948b3e-a832-499b-a662-49e3669030d4" containerName="init" Feb 27 08:51:45 crc kubenswrapper[4906]: E0227 08:51:45.849769 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-httpd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.849780 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-httpd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.850050 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-api" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.850071 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b3f1a22-8f41-40cc-8ae3-27241a6d507f" containerName="dnsmasq-dns" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.850095 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="76948b3e-a832-499b-a662-49e3669030d4" containerName="dnsmasq-dns" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.850106 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3de3e5d7-e4c1-4616-bf5a-fb0e0926bce4" containerName="neutron-httpd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.851527 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.855659 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.855976 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.856159 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.862775 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8d74fdd59-xdxbd"] Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.977104 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-combined-ca-bundle\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.977204 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-run-httpd\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.977238 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bswj\" (UniqueName: \"kubernetes.io/projected/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-kube-api-access-2bswj\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.977264 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-public-tls-certs\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.977609 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-internal-tls-certs\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.977798 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-log-httpd\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.977972 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-config-data\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " 
pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:45 crc kubenswrapper[4906]: I0227 08:51:45.978222 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-etc-swift\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.079066 4906 generic.go:334] "Generic (PLEG): container finished" podID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerID="eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f" exitCode=143 Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.079212 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" event={"ID":"f95124f9-c5e8-4445-9885-03cc47172f2f","Type":"ContainerDied","Data":"eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f"} Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.079797 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-internal-tls-certs\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.079899 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-log-httpd\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.079944 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-config-data\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.079986 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-etc-swift\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.080021 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-combined-ca-bundle\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.080053 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-run-httpd\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.080075 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bswj\" (UniqueName: 
\"kubernetes.io/projected/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-kube-api-access-2bswj\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.080096 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-public-tls-certs\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.080488 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-log-httpd\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.080833 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-run-httpd\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.089420 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-internal-tls-certs\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.090641 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-combined-ca-bundle\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.091410 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-etc-swift\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.092320 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-config-data\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.095246 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-public-tls-certs\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.099733 4906 generic.go:334] "Generic (PLEG): container finished" podID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerID="2163e99c114663ba9c16529fefc749c69ad4a034148d9011a16364b105692df1" exitCode=0 Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.099958 4906 
generic.go:334] "Generic (PLEG): container finished" podID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerID="dcbca9bbfa08ac23a876f9d1e338833dc5a2ee1265b1b915283015e7b3f0c54d" exitCode=2 Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.101545 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerDied","Data":"2163e99c114663ba9c16529fefc749c69ad4a034148d9011a16364b105692df1"} Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.101687 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerDied","Data":"dcbca9bbfa08ac23a876f9d1e338833dc5a2ee1265b1b915283015e7b3f0c54d"} Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.106342 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bswj\" (UniqueName: \"kubernetes.io/projected/b9e7b71f-1494-49d7-9c59-a5de95c6f7a6-kube-api-access-2bswj\") pod \"swift-proxy-8d74fdd59-xdxbd\" (UID: \"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6\") " pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.174763 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.219730 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:46 crc kubenswrapper[4906]: I0227 08:51:46.979689 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-8d74fdd59-xdxbd"] Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.125383 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8d74fdd59-xdxbd" event={"ID":"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6","Type":"ContainerStarted","Data":"b492dbc8d7fcb787672cb2050f58d97173316686dc27b5ed9b04a00f30015349"} Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.136527 4906 generic.go:334] "Generic (PLEG): container finished" podID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerID="d3eee1edec5c66b8097262495ce3f3fd6e5efbf8bebc77452174e5b6cf36c899" exitCode=0 Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.136814 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="cinder-scheduler" containerID="cri-o://ba3762128c2d8cc88294ce87f0aef06ff265adabbd433454b72446ccb1492f56" gracePeriod=30 Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.137157 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerDied","Data":"d3eee1edec5c66b8097262495ce3f3fd6e5efbf8bebc77452174e5b6cf36c899"} Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.137551 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="probe" containerID="cri-o://eb23296070858ebf704087e3d6751e05296e7b8805f8dbc7ece16860fe6c4a6a" gracePeriod=30 Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.377238 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.474429 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-fb745b69-c9pjn"] Feb 27 08:51:47 crc kubenswrapper[4906]: I0227 08:51:47.477139 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" podUID="573121fa-4108-4583-9df9-cab33de5b148" containerName="dnsmasq-dns" containerID="cri-o://95e95a754faab71e5c8c6ae374038de1a613ad1a7d9edf0a1d1eb41fe3601bcc" gracePeriod=10 Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.176237 4906 generic.go:334] "Generic (PLEG): container finished" podID="573121fa-4108-4583-9df9-cab33de5b148" containerID="95e95a754faab71e5c8c6ae374038de1a613ad1a7d9edf0a1d1eb41fe3601bcc" exitCode=0 Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.176712 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" event={"ID":"573121fa-4108-4583-9df9-cab33de5b148","Type":"ContainerDied","Data":"95e95a754faab71e5c8c6ae374038de1a613ad1a7d9edf0a1d1eb41fe3601bcc"} Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.193003 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8d74fdd59-xdxbd" event={"ID":"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6","Type":"ContainerStarted","Data":"073ef9fcc0944420bac5c351e25e69ae992c844a0692c9e5feb06ba644fe069c"} Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.193079 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-8d74fdd59-xdxbd" event={"ID":"b9e7b71f-1494-49d7-9c59-a5de95c6f7a6","Type":"ContainerStarted","Data":"f763cd6084f7ec017dc244f7b054249600dceaf11e11ff8c28ffd1e83c58645e"} Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.194920 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.194966 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.197243 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": read tcp 10.217.0.2:36486->10.217.0.165:9311: read: connection reset by peer" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.197610 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.165:9311/healthcheck\": read tcp 10.217.0.2:36480->10.217.0.165:9311: read: connection reset by peer" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.229231 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-8d74fdd59-xdxbd" podStartSLOduration=3.229203236 podStartE2EDuration="3.229203236s" podCreationTimestamp="2026-02-27 08:51:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:51:48.219464321 +0000 UTC m=+1406.613865941" watchObservedRunningTime="2026-02-27 08:51:48.229203236 +0000 UTC m=+1406.623604846" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.362251 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.464148 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-config\") pod \"573121fa-4108-4583-9df9-cab33de5b148\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.464220 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-nb\") pod \"573121fa-4108-4583-9df9-cab33de5b148\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.464335 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bllzs\" (UniqueName: \"kubernetes.io/projected/573121fa-4108-4583-9df9-cab33de5b148-kube-api-access-bllzs\") pod \"573121fa-4108-4583-9df9-cab33de5b148\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.464389 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-dns-svc\") pod \"573121fa-4108-4583-9df9-cab33de5b148\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.464444 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-sb\") pod \"573121fa-4108-4583-9df9-cab33de5b148\" (UID: \"573121fa-4108-4583-9df9-cab33de5b148\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.479193 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/573121fa-4108-4583-9df9-cab33de5b148-kube-api-access-bllzs" (OuterVolumeSpecName: "kube-api-access-bllzs") pod "573121fa-4108-4583-9df9-cab33de5b148" (UID: "573121fa-4108-4583-9df9-cab33de5b148"). InnerVolumeSpecName "kube-api-access-bllzs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.554858 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "573121fa-4108-4583-9df9-cab33de5b148" (UID: "573121fa-4108-4583-9df9-cab33de5b148"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.566511 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.566559 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bllzs\" (UniqueName: \"kubernetes.io/projected/573121fa-4108-4583-9df9-cab33de5b148-kube-api-access-bllzs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.571455 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-config" (OuterVolumeSpecName: "config") pod "573121fa-4108-4583-9df9-cab33de5b148" (UID: "573121fa-4108-4583-9df9-cab33de5b148"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.577730 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "573121fa-4108-4583-9df9-cab33de5b148" (UID: "573121fa-4108-4583-9df9-cab33de5b148"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.579473 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "573121fa-4108-4583-9df9-cab33de5b148" (UID: "573121fa-4108-4583-9df9-cab33de5b148"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.670016 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.670071 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.670086 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/573121fa-4108-4583-9df9-cab33de5b148-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.670394 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.160:3000/\": dial tcp 10.217.0.160:3000: connect: connection refused" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.760247 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.801613 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.876766 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data-custom\") pod \"f95124f9-c5e8-4445-9885-03cc47172f2f\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.876931 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-combined-ca-bundle\") pod \"f95124f9-c5e8-4445-9885-03cc47172f2f\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.876963 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvbpt\" (UniqueName: \"kubernetes.io/projected/f95124f9-c5e8-4445-9885-03cc47172f2f-kube-api-access-jvbpt\") pod \"f95124f9-c5e8-4445-9885-03cc47172f2f\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.877247 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data\") pod \"f95124f9-c5e8-4445-9885-03cc47172f2f\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.877287 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f95124f9-c5e8-4445-9885-03cc47172f2f-logs\") pod \"f95124f9-c5e8-4445-9885-03cc47172f2f\" (UID: \"f95124f9-c5e8-4445-9885-03cc47172f2f\") " Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.878223 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f95124f9-c5e8-4445-9885-03cc47172f2f-logs" (OuterVolumeSpecName: "logs") pod "f95124f9-c5e8-4445-9885-03cc47172f2f" (UID: "f95124f9-c5e8-4445-9885-03cc47172f2f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.887800 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f95124f9-c5e8-4445-9885-03cc47172f2f" (UID: "f95124f9-c5e8-4445-9885-03cc47172f2f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.889165 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f95124f9-c5e8-4445-9885-03cc47172f2f-kube-api-access-jvbpt" (OuterVolumeSpecName: "kube-api-access-jvbpt") pod "f95124f9-c5e8-4445-9885-03cc47172f2f" (UID: "f95124f9-c5e8-4445-9885-03cc47172f2f"). InnerVolumeSpecName "kube-api-access-jvbpt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.925007 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f95124f9-c5e8-4445-9885-03cc47172f2f" (UID: "f95124f9-c5e8-4445-9885-03cc47172f2f"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.941200 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data" (OuterVolumeSpecName: "config-data") pod "f95124f9-c5e8-4445-9885-03cc47172f2f" (UID: "f95124f9-c5e8-4445-9885-03cc47172f2f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.979841 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.980369 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f95124f9-c5e8-4445-9885-03cc47172f2f-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.980382 4906 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.980395 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f95124f9-c5e8-4445-9885-03cc47172f2f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:48 crc kubenswrapper[4906]: I0227 08:51:48.980409 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvbpt\" (UniqueName: \"kubernetes.io/projected/f95124f9-c5e8-4445-9885-03cc47172f2f-kube-api-access-jvbpt\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.225189 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.226231 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-fb745b69-c9pjn" event={"ID":"573121fa-4108-4583-9df9-cab33de5b148","Type":"ContainerDied","Data":"a5ca13e1c18a9c1b1d5b3506f745f7d7be8703a9c703aaae37e3da29d5c6c78e"} Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.226285 4906 scope.go:117] "RemoveContainer" containerID="95e95a754faab71e5c8c6ae374038de1a613ad1a7d9edf0a1d1eb41fe3601bcc" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.243982 4906 generic.go:334] "Generic (PLEG): container finished" podID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerID="21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932" exitCode=0 Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.244067 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" event={"ID":"f95124f9-c5e8-4445-9885-03cc47172f2f","Type":"ContainerDied","Data":"21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932"} Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.244102 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" event={"ID":"f95124f9-c5e8-4445-9885-03cc47172f2f","Type":"ContainerDied","Data":"a01defaf6cfae3bb571fde4febdf38279525fc752f3ed30bf733554ae7176182"} Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.244188 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5dfbf7b87b-s7l5j" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.287084 4906 generic.go:334] "Generic (PLEG): container finished" podID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerID="eb23296070858ebf704087e3d6751e05296e7b8805f8dbc7ece16860fe6c4a6a" exitCode=0 Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.287837 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a0065e88-26a7-4491-ac2a-0c22c054b839","Type":"ContainerDied","Data":"eb23296070858ebf704087e3d6751e05296e7b8805f8dbc7ece16860fe6c4a6a"} Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.306041 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.306369 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-log" containerID="cri-o://2a17575ed3c2abf6315d8714bb38ce68cde7100200e1d436b228688bf99d3aea" gracePeriod=30 Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.306592 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-httpd" containerID="cri-o://3000d068480fff83d0eee45968fb5fc1136ad32fb16cf8a0e3502def1179fd9d" gracePeriod=30 Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.324818 4906 scope.go:117] "RemoveContainer" containerID="bfbbc307e6deb9674dfad51046e7390c52a99e8d747a93daa39f174265db4819" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.326279 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5dfbf7b87b-s7l5j"] Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.344979 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5dfbf7b87b-s7l5j"] Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.355529 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-fb745b69-c9pjn"] Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.359221 4906 scope.go:117] "RemoveContainer" containerID="21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.364346 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-fb745b69-c9pjn"] Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.427832 4906 scope.go:117] "RemoveContainer" containerID="eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.507165 4906 scope.go:117] "RemoveContainer" containerID="21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932" Feb 27 08:51:49 crc kubenswrapper[4906]: E0227 08:51:49.507912 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932\": container with ID starting with 21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932 not found: ID does not exist" containerID="21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.507953 4906 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932"} err="failed to get container status \"21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932\": rpc error: code = NotFound desc = could not find container \"21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932\": container with ID starting with 21aa51804a89937917282205a22028c7baebaba56a423bd6d396a960e7be0932 not found: ID does not exist" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.507977 4906 scope.go:117] "RemoveContainer" containerID="eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f" Feb 27 08:51:49 crc kubenswrapper[4906]: E0227 08:51:49.508776 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f\": container with ID starting with eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f not found: ID does not exist" containerID="eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f" Feb 27 08:51:49 crc kubenswrapper[4906]: I0227 08:51:49.508805 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f"} err="failed to get container status \"eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f\": rpc error: code = NotFound desc = could not find container \"eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f\": container with ID starting with eeec9240c0f36a56a38f3e13442d988cd51447c258cf19b23a9d9bcf8ae40a3f not found: ID does not exist" Feb 27 08:51:50 crc kubenswrapper[4906]: I0227 08:51:50.311309 4906 generic.go:334] "Generic (PLEG): container finished" podID="92423fdd-529f-46e5-8eff-6241f4a41225" containerID="2a17575ed3c2abf6315d8714bb38ce68cde7100200e1d436b228688bf99d3aea" exitCode=143 Feb 27 08:51:50 crc kubenswrapper[4906]: I0227 08:51:50.311392 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"92423fdd-529f-46e5-8eff-6241f4a41225","Type":"ContainerDied","Data":"2a17575ed3c2abf6315d8714bb38ce68cde7100200e1d436b228688bf99d3aea"} Feb 27 08:51:50 crc kubenswrapper[4906]: I0227 08:51:50.321019 4906 generic.go:334] "Generic (PLEG): container finished" podID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerID="ba3762128c2d8cc88294ce87f0aef06ff265adabbd433454b72446ccb1492f56" exitCode=0 Feb 27 08:51:50 crc kubenswrapper[4906]: I0227 08:51:50.321171 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a0065e88-26a7-4491-ac2a-0c22c054b839","Type":"ContainerDied","Data":"ba3762128c2d8cc88294ce87f0aef06ff265adabbd433454b72446ccb1492f56"} Feb 27 08:51:50 crc kubenswrapper[4906]: I0227 08:51:50.564868 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="573121fa-4108-4583-9df9-cab33de5b148" path="/var/lib/kubelet/pods/573121fa-4108-4583-9df9-cab33de5b148/volumes" Feb 27 08:51:50 crc kubenswrapper[4906]: I0227 08:51:50.566011 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" path="/var/lib/kubelet/pods/f95124f9-c5e8-4445-9885-03cc47172f2f/volumes" Feb 27 08:51:51 crc kubenswrapper[4906]: I0227 08:51:51.345168 4906 generic.go:334] "Generic (PLEG): container finished" podID="54ba4949-578e-4c0a-94d2-0add9be8821d" 
containerID="daeb5e1e83b53f1af0f9d3ed986190ba46ac2bce85e21b6fa6429b1f1a0cfda1" exitCode=0 Feb 27 08:51:51 crc kubenswrapper[4906]: I0227 08:51:51.345450 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerDied","Data":"daeb5e1e83b53f1af0f9d3ed986190ba46ac2bce85e21b6fa6429b1f1a0cfda1"} Feb 27 08:51:52 crc kubenswrapper[4906]: I0227 08:51:52.795371 4906 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod8f7fb2c4-3404-4a84-9c80-c20f3eb7539d"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod8f7fb2c4-3404-4a84-9c80-c20f3eb7539d] : Timed out while waiting for systemd to remove kubepods-besteffort-pod8f7fb2c4_3404_4a84_9c80_c20f3eb7539d.slice" Feb 27 08:51:52 crc kubenswrapper[4906]: E0227 08:51:52.795759 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod8f7fb2c4-3404-4a84-9c80-c20f3eb7539d] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod8f7fb2c4-3404-4a84-9c80-c20f3eb7539d] : Timed out while waiting for systemd to remove kubepods-besteffort-pod8f7fb2c4_3404_4a84_9c80_c20f3eb7539d.slice" pod="openstack/glance-default-external-api-0" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.380602 4906 generic.go:334] "Generic (PLEG): container finished" podID="92423fdd-529f-46e5-8eff-6241f4a41225" containerID="3000d068480fff83d0eee45968fb5fc1136ad32fb16cf8a0e3502def1179fd9d" exitCode=0 Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.380708 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.380691 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"92423fdd-529f-46e5-8eff-6241f4a41225","Type":"ContainerDied","Data":"3000d068480fff83d0eee45968fb5fc1136ad32fb16cf8a0e3502def1179fd9d"} Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.414817 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.429790 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.449077 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:53 crc kubenswrapper[4906]: E0227 08:51:53.449577 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.449595 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api" Feb 27 08:51:53 crc kubenswrapper[4906]: E0227 08:51:53.449616 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api-log" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.449623 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api-log" Feb 27 08:51:53 crc kubenswrapper[4906]: E0227 08:51:53.451019 4906 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="573121fa-4108-4583-9df9-cab33de5b148" containerName="init" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.451038 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="573121fa-4108-4583-9df9-cab33de5b148" containerName="init" Feb 27 08:51:53 crc kubenswrapper[4906]: E0227 08:51:53.451058 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="573121fa-4108-4583-9df9-cab33de5b148" containerName="dnsmasq-dns" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.451069 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="573121fa-4108-4583-9df9-cab33de5b148" containerName="dnsmasq-dns" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.451285 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.451308 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f95124f9-c5e8-4445-9885-03cc47172f2f" containerName="barbican-api-log" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.451319 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="573121fa-4108-4583-9df9-cab33de5b148" containerName="dnsmasq-dns" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.452525 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.461350 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.461406 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.477920 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596451 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596536 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4503410a-77c1-4da7-b599-f6746affaaf8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596573 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d5qd\" (UniqueName: \"kubernetes.io/projected/4503410a-77c1-4da7-b599-f6746affaaf8-kube-api-access-7d5qd\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596601 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-scripts\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " 
pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596627 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4503410a-77c1-4da7-b599-f6746affaaf8-logs\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596643 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596671 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-config-data\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.596690 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.683017 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-httpd" probeResult="failure" output="Get \"https://10.217.0.161:9292/healthcheck\": dial tcp 10.217.0.161:9292: connect: connection refused" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.683093 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/glance-default-internal-api-0" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-log" probeResult="failure" output="Get \"https://10.217.0.161:9292/healthcheck\": dial tcp 10.217.0.161:9292: connect: connection refused" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.698443 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4503410a-77c1-4da7-b599-f6746affaaf8-logs\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.698505 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.698572 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-config-data\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 
08:51:53.698600 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.699653 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.699737 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4503410a-77c1-4da7-b599-f6746affaaf8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.699796 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d5qd\" (UniqueName: \"kubernetes.io/projected/4503410a-77c1-4da7-b599-f6746affaaf8-kube-api-access-7d5qd\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.699827 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-scripts\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.700015 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.700177 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4503410a-77c1-4da7-b599-f6746affaaf8-logs\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.702218 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/4503410a-77c1-4da7-b599-f6746affaaf8-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.716209 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-scripts\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.716871 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-config-data\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.719743 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.728978 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4503410a-77c1-4da7-b599-f6746affaaf8-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.736462 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d5qd\" (UniqueName: \"kubernetes.io/projected/4503410a-77c1-4da7-b599-f6746affaaf8-kube-api-access-7d5qd\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.768970 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"4503410a-77c1-4da7-b599-f6746affaaf8\") " pod="openstack/glance-default-external-api-0" Feb 27 08:51:53 crc kubenswrapper[4906]: I0227 08:51:53.786596 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Feb 27 08:51:54 crc kubenswrapper[4906]: I0227 08:51:54.572093 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f7fb2c4-3404-4a84-9c80-c20f3eb7539d" path="/var/lib/kubelet/pods/8f7fb2c4-3404-4a84-9c80-c20f3eb7539d/volumes" Feb 27 08:51:54 crc kubenswrapper[4906]: I0227 08:51:54.680002 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:51:54 crc kubenswrapper[4906]: I0227 08:51:54.741492 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:51:56 crc kubenswrapper[4906]: I0227 08:51:56.227296 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:56 crc kubenswrapper[4906]: I0227 08:51:56.227368 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-8d74fdd59-xdxbd" Feb 27 08:51:56 crc kubenswrapper[4906]: I0227 08:51:56.687928 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:51:56 crc kubenswrapper[4906]: I0227 08:51:56.947705 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7f78987f9b-lzmw8" Feb 27 08:51:57 crc kubenswrapper[4906]: I0227 08:51:57.035068 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-98c78d5f8-j9wmn"] Feb 27 08:51:57 crc kubenswrapper[4906]: I0227 08:51:57.448973 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" containerID="cri-o://d1136f91f3db289760b67bf78418a2809b37b6cc1c92b62bf1341729971407ed" gracePeriod=30 Feb 27 08:51:57 crc kubenswrapper[4906]: I0227 08:51:57.449000 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon-log" containerID="cri-o://ede6b37c237a95667008a06057fd933008e9e665abcee2a3c2a28ce6a6594391" gracePeriod=30 Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.325461 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.418011 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-combined-ca-bundle\") pod \"a0065e88-26a7-4491-ac2a-0c22c054b839\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.418149 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqxbc\" (UniqueName: \"kubernetes.io/projected/a0065e88-26a7-4491-ac2a-0c22c054b839-kube-api-access-fqxbc\") pod \"a0065e88-26a7-4491-ac2a-0c22c054b839\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.418197 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-scripts\") pod \"a0065e88-26a7-4491-ac2a-0c22c054b839\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.418378 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0065e88-26a7-4491-ac2a-0c22c054b839-etc-machine-id\") pod \"a0065e88-26a7-4491-ac2a-0c22c054b839\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.418510 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data-custom\") pod \"a0065e88-26a7-4491-ac2a-0c22c054b839\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.418541 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data\") pod \"a0065e88-26a7-4491-ac2a-0c22c054b839\" (UID: \"a0065e88-26a7-4491-ac2a-0c22c054b839\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.422872 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a0065e88-26a7-4491-ac2a-0c22c054b839-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a0065e88-26a7-4491-ac2a-0c22c054b839" (UID: "a0065e88-26a7-4491-ac2a-0c22c054b839"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.428871 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-scripts" (OuterVolumeSpecName: "scripts") pod "a0065e88-26a7-4491-ac2a-0c22c054b839" (UID: "a0065e88-26a7-4491-ac2a-0c22c054b839"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.453647 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0065e88-26a7-4491-ac2a-0c22c054b839-kube-api-access-fqxbc" (OuterVolumeSpecName: "kube-api-access-fqxbc") pod "a0065e88-26a7-4491-ac2a-0c22c054b839" (UID: "a0065e88-26a7-4491-ac2a-0c22c054b839"). InnerVolumeSpecName "kube-api-access-fqxbc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.459998 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a0065e88-26a7-4491-ac2a-0c22c054b839" (UID: "a0065e88-26a7-4491-ac2a-0c22c054b839"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.484747 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"a0065e88-26a7-4491-ac2a-0c22c054b839","Type":"ContainerDied","Data":"733b7069e621159c5169a359ebe0e431603707176696388c81967e1f89134e69"} Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.484815 4906 scope.go:117] "RemoveContainer" containerID="eb23296070858ebf704087e3d6751e05296e7b8805f8dbc7ece16860fe6c4a6a" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.485059 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.504802 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.524403 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqxbc\" (UniqueName: \"kubernetes.io/projected/a0065e88-26a7-4491-ac2a-0c22c054b839-kube-api-access-fqxbc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.524449 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.524463 4906 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a0065e88-26a7-4491-ac2a-0c22c054b839-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.524476 4906 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.551002 4906 scope.go:117] "RemoveContainer" containerID="ba3762128c2d8cc88294ce87f0aef06ff265adabbd433454b72446ccb1492f56" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.582472 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0065e88-26a7-4491-ac2a-0c22c054b839" (UID: "a0065e88-26a7-4491-ac2a-0c22c054b839"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.613228 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.632356 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-combined-ca-bundle\") pod \"54ba4949-578e-4c0a-94d2-0add9be8821d\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.632508 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-scripts\") pod \"54ba4949-578e-4c0a-94d2-0add9be8821d\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.632572 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-config-data\") pod \"54ba4949-578e-4c0a-94d2-0add9be8821d\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.632619 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tx6vd\" (UniqueName: \"kubernetes.io/projected/54ba4949-578e-4c0a-94d2-0add9be8821d-kube-api-access-tx6vd\") pod \"54ba4949-578e-4c0a-94d2-0add9be8821d\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.632662 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-run-httpd\") pod \"54ba4949-578e-4c0a-94d2-0add9be8821d\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.632686 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-sg-core-conf-yaml\") pod \"54ba4949-578e-4c0a-94d2-0add9be8821d\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.632726 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-log-httpd\") pod \"54ba4949-578e-4c0a-94d2-0add9be8821d\" (UID: \"54ba4949-578e-4c0a-94d2-0add9be8821d\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.633319 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.633343 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "54ba4949-578e-4c0a-94d2-0add9be8821d" (UID: "54ba4949-578e-4c0a-94d2-0add9be8821d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.633643 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "54ba4949-578e-4c0a-94d2-0add9be8821d" (UID: "54ba4949-578e-4c0a-94d2-0add9be8821d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.638233 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-scripts" (OuterVolumeSpecName: "scripts") pod "54ba4949-578e-4c0a-94d2-0add9be8821d" (UID: "54ba4949-578e-4c0a-94d2-0add9be8821d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.640241 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data" (OuterVolumeSpecName: "config-data") pod "a0065e88-26a7-4491-ac2a-0c22c054b839" (UID: "a0065e88-26a7-4491-ac2a-0c22c054b839"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.641310 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54ba4949-578e-4c0a-94d2-0add9be8821d-kube-api-access-tx6vd" (OuterVolumeSpecName: "kube-api-access-tx6vd") pod "54ba4949-578e-4c0a-94d2-0add9be8821d" (UID: "54ba4949-578e-4c0a-94d2-0add9be8821d"). InnerVolumeSpecName "kube-api-access-tx6vd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.682494 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "54ba4949-578e-4c0a-94d2-0add9be8821d" (UID: "54ba4949-578e-4c0a-94d2-0add9be8821d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.686907 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.734649 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-internal-tls-certs\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.735058 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.735233 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-config-data\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.735349 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-scripts\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.735470 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtmcc\" (UniqueName: \"kubernetes.io/projected/92423fdd-529f-46e5-8eff-6241f4a41225-kube-api-access-xtmcc\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.735672 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-httpd-run\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.735965 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-combined-ca-bundle\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.736086 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-logs\") pod \"92423fdd-529f-46e5-8eff-6241f4a41225\" (UID: \"92423fdd-529f-46e5-8eff-6241f4a41225\") " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.737249 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.737357 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0065e88-26a7-4491-ac2a-0c22c054b839-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.737552 
4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tx6vd\" (UniqueName: \"kubernetes.io/projected/54ba4949-578e-4c0a-94d2-0add9be8821d-kube-api-access-tx6vd\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.737634 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.737720 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.737795 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/54ba4949-578e-4c0a-94d2-0add9be8821d-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.738462 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-logs" (OuterVolumeSpecName: "logs") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.739795 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.744109 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-scripts" (OuterVolumeSpecName: "scripts") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.749717 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92423fdd-529f-46e5-8eff-6241f4a41225-kube-api-access-xtmcc" (OuterVolumeSpecName: "kube-api-access-xtmcc") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "kube-api-access-xtmcc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.752416 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.764088 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54ba4949-578e-4c0a-94d2-0add9be8821d" (UID: "54ba4949-578e-4c0a-94d2-0add9be8821d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.778733 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.825132 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.840089 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.840510 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.840608 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.840695 4906 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.840821 4906 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.840956 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.841046 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtmcc\" (UniqueName: \"kubernetes.io/projected/92423fdd-529f-46e5-8eff-6241f4a41225-kube-api-access-xtmcc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.841132 4906 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/92423fdd-529f-46e5-8eff-6241f4a41225-httpd-run\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.847370 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-config-data" (OuterVolumeSpecName: "config-data") pod "54ba4949-578e-4c0a-94d2-0add9be8821d" (UID: "54ba4949-578e-4c0a-94d2-0add9be8821d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.885868 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-config-data" (OuterVolumeSpecName: "config-data") pod "92423fdd-529f-46e5-8eff-6241f4a41225" (UID: "92423fdd-529f-46e5-8eff-6241f4a41225"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.913004 4906 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.944665 4906 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.944705 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92423fdd-529f-46e5-8eff-6241f4a41225-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:58 crc kubenswrapper[4906]: I0227 08:51:58.944716 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54ba4949-578e-4c0a-94d2-0add9be8821d-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.030859 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.065833 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.081318 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.082011 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="probe" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.082033 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="probe" Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.082059 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="cinder-scheduler" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.082067 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="cinder-scheduler" Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.082085 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-httpd" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.082092 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-httpd" Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.082107 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="proxy-httpd" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.082112 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="proxy-httpd" Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.082142 4906 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-central-agent" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.082148 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-central-agent" Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.087979 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="sg-core" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088046 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="sg-core" Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.088069 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-log" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088076 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-log" Feb 27 08:51:59 crc kubenswrapper[4906]: E0227 08:51:59.088096 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-notification-agent" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088102 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-notification-agent" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088461 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-notification-agent" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088490 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="ceilometer-central-agent" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088505 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="cinder-scheduler" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088705 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="proxy-httpd" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088715 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" containerName="probe" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088728 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-httpd" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088742 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" containerName="glance-log" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.088753 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" containerName="sg-core" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.090098 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.094350 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.108260 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.129006 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-75hmz"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.130996 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.147719 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-75hmz"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154419 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/16a6f839-050d-49e8-b788-3ff1f5e46329-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154473 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqh6v\" (UniqueName: \"kubernetes.io/projected/16a6f839-050d-49e8-b788-3ff1f5e46329-kube-api-access-sqh6v\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154520 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154556 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c831978a-e34a-46bf-99c4-bfdffd022f43-operator-scripts\") pod \"nova-api-db-create-75hmz\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154633 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-scripts\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154686 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154719 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zgkm\" (UniqueName: \"kubernetes.io/projected/c831978a-e34a-46bf-99c4-bfdffd022f43-kube-api-access-8zgkm\") pod 
\"nova-api-db-create-75hmz\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.154752 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-config-data\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.178612 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0b66-account-create-update-t85cr"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.185740 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.188139 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.197121 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-5hsm6"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.199060 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.207959 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0b66-account-create-update-t85cr"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.225377 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5hsm6"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257470 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-config-data\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257584 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/16a6f839-050d-49e8-b788-3ff1f5e46329-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257626 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqh6v\" (UniqueName: \"kubernetes.io/projected/16a6f839-050d-49e8-b788-3ff1f5e46329-kube-api-access-sqh6v\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257679 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257721 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c831978a-e34a-46bf-99c4-bfdffd022f43-operator-scripts\") pod \"nova-api-db-create-75hmz\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " 
pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257806 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-scripts\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257829 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/16a6f839-050d-49e8-b788-3ff1f5e46329-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257866 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.257984 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zgkm\" (UniqueName: \"kubernetes.io/projected/c831978a-e34a-46bf-99c4-bfdffd022f43-kube-api-access-8zgkm\") pod \"nova-api-db-create-75hmz\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.259373 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c831978a-e34a-46bf-99c4-bfdffd022f43-operator-scripts\") pod \"nova-api-db-create-75hmz\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.262225 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-config-data\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.262515 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-scripts\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.269963 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.272734 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16a6f839-050d-49e8-b788-3ff1f5e46329-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.277856 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqh6v\" (UniqueName: 
\"kubernetes.io/projected/16a6f839-050d-49e8-b788-3ff1f5e46329-kube-api-access-sqh6v\") pod \"cinder-scheduler-0\" (UID: \"16a6f839-050d-49e8-b788-3ff1f5e46329\") " pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.278206 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zgkm\" (UniqueName: \"kubernetes.io/projected/c831978a-e34a-46bf-99c4-bfdffd022f43-kube-api-access-8zgkm\") pod \"nova-api-db-create-75hmz\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.360595 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvjdv\" (UniqueName: \"kubernetes.io/projected/1d2a80d9-824f-416e-a035-de905d169e5d-kube-api-access-nvjdv\") pod \"nova-api-0b66-account-create-update-t85cr\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.360680 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d2a80d9-824f-416e-a035-de905d169e5d-operator-scripts\") pod \"nova-api-0b66-account-create-update-t85cr\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.360728 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b86e3b5-b8e0-48ce-8903-4dc93860723f-operator-scripts\") pod \"nova-cell0-db-create-5hsm6\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.360840 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz9lv\" (UniqueName: \"kubernetes.io/projected/9b86e3b5-b8e0-48ce-8903-4dc93860723f-kube-api-access-hz9lv\") pod \"nova-cell0-db-create-5hsm6\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.380022 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-5654-account-create-update-ljxcj"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.384905 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.392062 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-cjk9b"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.394553 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.395398 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.406851 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5654-account-create-update-ljxcj"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.429573 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.430455 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-cjk9b"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.455204 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-75hmz" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462675 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0033485-16b3-4a98-a7c2-588f320dddee-operator-scripts\") pod \"nova-cell0-5654-account-create-update-ljxcj\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462742 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvjdv\" (UniqueName: \"kubernetes.io/projected/1d2a80d9-824f-416e-a035-de905d169e5d-kube-api-access-nvjdv\") pod \"nova-api-0b66-account-create-update-t85cr\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462765 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d2a80d9-824f-416e-a035-de905d169e5d-operator-scripts\") pod \"nova-api-0b66-account-create-update-t85cr\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462789 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flqvf\" (UniqueName: \"kubernetes.io/projected/f0033485-16b3-4a98-a7c2-588f320dddee-kube-api-access-flqvf\") pod \"nova-cell0-5654-account-create-update-ljxcj\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462815 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b86e3b5-b8e0-48ce-8903-4dc93860723f-operator-scripts\") pod \"nova-cell0-db-create-5hsm6\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462852 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpkxd\" (UniqueName: \"kubernetes.io/projected/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-kube-api-access-xpkxd\") pod \"nova-cell1-db-create-cjk9b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462922 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-operator-scripts\") pod \"nova-cell1-db-create-cjk9b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.462962 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz9lv\" (UniqueName: 
\"kubernetes.io/projected/9b86e3b5-b8e0-48ce-8903-4dc93860723f-kube-api-access-hz9lv\") pod \"nova-cell0-db-create-5hsm6\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.463688 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b86e3b5-b8e0-48ce-8903-4dc93860723f-operator-scripts\") pod \"nova-cell0-db-create-5hsm6\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.465615 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d2a80d9-824f-416e-a035-de905d169e5d-operator-scripts\") pod \"nova-api-0b66-account-create-update-t85cr\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.480226 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz9lv\" (UniqueName: \"kubernetes.io/projected/9b86e3b5-b8e0-48ce-8903-4dc93860723f-kube-api-access-hz9lv\") pod \"nova-cell0-db-create-5hsm6\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.483359 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvjdv\" (UniqueName: \"kubernetes.io/projected/1d2a80d9-824f-416e-a035-de905d169e5d-kube-api-access-nvjdv\") pod \"nova-api-0b66-account-create-update-t85cr\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.505234 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.505327 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"92423fdd-529f-46e5-8eff-6241f4a41225","Type":"ContainerDied","Data":"41acc78ef1918a15d91766f8fe646264e7a418a42bb9f15ba1d5487c3fa09d72"} Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.505516 4906 scope.go:117] "RemoveContainer" containerID="3000d068480fff83d0eee45968fb5fc1136ad32fb16cf8a0e3502def1179fd9d" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.508162 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"e4bf0f4a-8f16-4255-8d40-37826771ba47","Type":"ContainerStarted","Data":"2f7c52f9f62a07a21f9d864bb6659df4560c251c804889b251a8543ff7e6e657"} Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.522148 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.532079 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.532113 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"54ba4949-578e-4c0a-94d2-0add9be8821d","Type":"ContainerDied","Data":"32769346475cf4bc41ee2a3b882eb982b137ea8d0e63ea1764dcffa7360cdf70"} Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.543692 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.565396 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpkxd\" (UniqueName: \"kubernetes.io/projected/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-kube-api-access-xpkxd\") pod \"nova-cell1-db-create-cjk9b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.565766 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-operator-scripts\") pod \"nova-cell1-db-create-cjk9b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.565830 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0033485-16b3-4a98-a7c2-588f320dddee-operator-scripts\") pod \"nova-cell0-5654-account-create-update-ljxcj\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.565895 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flqvf\" (UniqueName: \"kubernetes.io/projected/f0033485-16b3-4a98-a7c2-588f320dddee-kube-api-access-flqvf\") pod \"nova-cell0-5654-account-create-update-ljxcj\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.565489 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4503410a-77c1-4da7-b599-f6746affaaf8","Type":"ContainerStarted","Data":"11b84735290482a5bf0db15708128e74765dd637b65b44e93b5c49523bb77416"} Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.567004 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0033485-16b3-4a98-a7c2-588f320dddee-operator-scripts\") pod \"nova-cell0-5654-account-create-update-ljxcj\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.567252 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-operator-scripts\") pod \"nova-cell1-db-create-cjk9b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.580728 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-9692-account-create-update-c6k95"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.582811 4906 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.586198 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.588035 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.9213129159999998 podStartE2EDuration="26.588006222s" podCreationTimestamp="2026-02-27 08:51:33 +0000 UTC" firstStartedPulling="2026-02-27 08:51:35.616572192 +0000 UTC m=+1394.010973802" lastFinishedPulling="2026-02-27 08:51:58.283265498 +0000 UTC m=+1416.677667108" observedRunningTime="2026-02-27 08:51:59.548514736 +0000 UTC m=+1417.942916356" watchObservedRunningTime="2026-02-27 08:51:59.588006222 +0000 UTC m=+1417.982407832" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.591611 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpkxd\" (UniqueName: \"kubernetes.io/projected/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-kube-api-access-xpkxd\") pod \"nova-cell1-db-create-cjk9b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.602048 4906 scope.go:117] "RemoveContainer" containerID="2a17575ed3c2abf6315d8714bb38ce68cde7100200e1d436b228688bf99d3aea" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.615894 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flqvf\" (UniqueName: \"kubernetes.io/projected/f0033485-16b3-4a98-a7c2-588f320dddee-kube-api-access-flqvf\") pod \"nova-cell0-5654-account-create-update-ljxcj\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.635529 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9692-account-create-update-c6k95"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.670388 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-operator-scripts\") pod \"nova-cell1-9692-account-create-update-c6k95\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.670527 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jk6rp\" (UniqueName: \"kubernetes.io/projected/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-kube-api-access-jk6rp\") pod \"nova-cell1-9692-account-create-update-c6k95\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.672214 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.688546 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.705954 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.721915 4906 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.738644 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.756213 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.764063 4906 scope.go:117] "RemoveContainer" containerID="2163e99c114663ba9c16529fefc749c69ad4a034148d9011a16364b105692df1" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.773361 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-operator-scripts\") pod \"nova-cell1-9692-account-create-update-c6k95\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.773594 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jk6rp\" (UniqueName: \"kubernetes.io/projected/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-kube-api-access-jk6rp\") pod \"nova-cell1-9692-account-create-update-c6k95\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.774750 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-operator-scripts\") pod \"nova-cell1-9692-account-create-update-c6k95\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.777186 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.781718 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.801141 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.801356 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.813004 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.839003 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jk6rp\" (UniqueName: \"kubernetes.io/projected/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-kube-api-access-jk6rp\") pod \"nova-cell1-9692-account-create-update-c6k95\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.837975 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.844624 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.844935 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.933854 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:51:59 crc kubenswrapper[4906]: I0227 08:51:59.947592 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.003143 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010104 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plw85\" (UniqueName: \"kubernetes.io/projected/632dcbd3-7f39-484b-92db-ad2f1b574361-kube-api-access-plw85\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010163 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010261 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010328 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-run-httpd\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010354 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-log-httpd\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010393 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-logs\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010430 4906 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-scripts\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010468 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-config-data\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010535 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010556 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010642 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ct4gp\" (UniqueName: \"kubernetes.io/projected/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-kube-api-access-ct4gp\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010681 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010728 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.010765 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.021334 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.093490 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-75hmz"] Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.137734 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.137933 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-run-httpd\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138003 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-log-httpd\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138095 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-logs\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138161 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-scripts\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138218 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-config-data\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138350 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138385 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138542 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ct4gp\" (UniqueName: \"kubernetes.io/projected/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-kube-api-access-ct4gp\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138609 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 
08:52:00.138632 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138706 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138838 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-log-httpd\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138873 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.139016 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plw85\" (UniqueName: \"kubernetes.io/projected/632dcbd3-7f39-484b-92db-ad2f1b574361-kube-api-access-plw85\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.139026 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-run-httpd\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.139113 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.139252 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-logs\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.138084 4906 scope.go:117] "RemoveContainer" containerID="dcbca9bbfa08ac23a876f9d1e338833dc5a2ee1265b1b915283015e7b3f0c54d" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.139702 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.139845 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for 
volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.159821 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.166632 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.172225 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-scripts\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.173905 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.174630 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-config-data\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.178014 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.179008 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.181140 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plw85\" (UniqueName: \"kubernetes.io/projected/632dcbd3-7f39-484b-92db-ad2f1b574361-kube-api-access-plw85\") pod \"ceilometer-0\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.182095 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc 
kubenswrapper[4906]: I0227 08:52:00.186298 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ct4gp\" (UniqueName: \"kubernetes.io/projected/e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38-kube-api-access-ct4gp\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.194891 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536372-clhfq"] Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.197090 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536372-clhfq" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.200914 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.200960 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.201395 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.203017 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-internal-api-0\" (UID: \"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38\") " pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.215283 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536372-clhfq"] Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.238735 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.241960 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxddz\" (UniqueName: \"kubernetes.io/projected/2ba4b224-b847-4ce9-a4f4-52fa5001cb2d-kube-api-access-rxddz\") pod \"auto-csr-approver-29536372-clhfq\" (UID: \"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d\") " pod="openshift-infra/auto-csr-approver-29536372-clhfq" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.264824 4906 scope.go:117] "RemoveContainer" containerID="daeb5e1e83b53f1af0f9d3ed986190ba46ac2bce85e21b6fa6429b1f1a0cfda1" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.351402 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxddz\" (UniqueName: \"kubernetes.io/projected/2ba4b224-b847-4ce9-a4f4-52fa5001cb2d-kube-api-access-rxddz\") pod \"auto-csr-approver-29536372-clhfq\" (UID: \"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d\") " pod="openshift-infra/auto-csr-approver-29536372-clhfq" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.354817 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.375442 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.396822 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxddz\" (UniqueName: \"kubernetes.io/projected/2ba4b224-b847-4ce9-a4f4-52fa5001cb2d-kube-api-access-rxddz\") pod \"auto-csr-approver-29536372-clhfq\" (UID: \"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d\") " pod="openshift-infra/auto-csr-approver-29536372-clhfq" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.412952 4906 scope.go:117] "RemoveContainer" containerID="d3eee1edec5c66b8097262495ce3f3fd6e5efbf8bebc77452174e5b6cf36c899" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.580038 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536372-clhfq" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.586607 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54ba4949-578e-4c0a-94d2-0add9be8821d" path="/var/lib/kubelet/pods/54ba4949-578e-4c0a-94d2-0add9be8821d/volumes" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.589424 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92423fdd-529f-46e5-8eff-6241f4a41225" path="/var/lib/kubelet/pods/92423fdd-529f-46e5-8eff-6241f4a41225/volumes" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.590900 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0065e88-26a7-4491-ac2a-0c22c054b839" path="/var/lib/kubelet/pods/a0065e88-26a7-4491-ac2a-0c22c054b839/volumes" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.611915 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"16a6f839-050d-49e8-b788-3ff1f5e46329","Type":"ContainerStarted","Data":"595f7fec9e8501d3d7bd69979d953e9a7c393e4fbfc11e00481e54cc75ca6463"} Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.624731 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4503410a-77c1-4da7-b599-f6746affaaf8","Type":"ContainerStarted","Data":"43737abbeaa7422479840a78b5338736771113284b4d8f6f869596f78e927767"} Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.635615 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-75hmz" event={"ID":"c831978a-e34a-46bf-99c4-bfdffd022f43","Type":"ContainerStarted","Data":"76b339b33a0b28bc3f6fa912f3e45da2330cbcbbea668116e53832ff3901ea61"} Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.635658 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-75hmz" event={"ID":"c831978a-e34a-46bf-99c4-bfdffd022f43","Type":"ContainerStarted","Data":"baa54e097fe7d7785651941531c4488452b61d240531263cebe2a89956721c5c"} Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.664677 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-75hmz" podStartSLOduration=1.664644633 podStartE2EDuration="1.664644633s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:00.655526704 +0000 UTC m=+1419.049928314" watchObservedRunningTime="2026-02-27 08:52:00.664644633 +0000 UTC 
m=+1419.059046253" Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.688351 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-5hsm6"] Feb 27 08:52:00 crc kubenswrapper[4906]: I0227 08:52:00.748240 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0b66-account-create-update-t85cr"] Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.162064 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-9692-account-create-update-c6k95"] Feb 27 08:52:01 crc kubenswrapper[4906]: W0227 08:52:01.212976 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd7cdbcd3_cfe0_4932_9bd5_2b0ec3553c82.slice/crio-701a0ca4213b2e9bf646fa4e289703b1030a90a092e36aebb6c1efa75f08f34c WatchSource:0}: Error finding container 701a0ca4213b2e9bf646fa4e289703b1030a90a092e36aebb6c1efa75f08f34c: Status 404 returned error can't find the container with id 701a0ca4213b2e9bf646fa4e289703b1030a90a092e36aebb6c1efa75f08f34c Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.248324 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-cjk9b"] Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.395441 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5654-account-create-update-ljxcj"] Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.409510 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:01 crc kubenswrapper[4906]: W0227 08:52:01.447282 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod632dcbd3_7f39_484b_92db_ad2f1b574361.slice/crio-4845f1f456d42eb22c75ede21789383b9a86bc864b0cb1409cc45f76be85339c WatchSource:0}: Error finding container 4845f1f456d42eb22c75ede21789383b9a86bc864b0cb1409cc45f76be85339c: Status 404 returned error can't find the container with id 4845f1f456d42eb22c75ede21789383b9a86bc864b0cb1409cc45f76be85339c Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.674840 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0b66-account-create-update-t85cr" event={"ID":"1d2a80d9-824f-416e-a035-de905d169e5d","Type":"ContainerStarted","Data":"9d0e8a0faa98f4bbb52fddfbdb315807fc911355169f50a0567651ff92cf2f30"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.674921 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0b66-account-create-update-t85cr" event={"ID":"1d2a80d9-824f-416e-a035-de905d169e5d","Type":"ContainerStarted","Data":"7c6203515d30c7a94fa242d167756598446fce84b42fe4c5ed0720cc0b81c70a"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.678208 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerStarted","Data":"4845f1f456d42eb22c75ede21789383b9a86bc864b0cb1409cc45f76be85339c"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.683436 4906 generic.go:334] "Generic (PLEG): container finished" podID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerID="d1136f91f3db289760b67bf78418a2809b37b6cc1c92b62bf1341729971407ed" exitCode=0 Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.683533 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-98c78d5f8-j9wmn" 
event={"ID":"e6ab1c10-b552-4a69-94c7-68280ab7e126","Type":"ContainerDied","Data":"d1136f91f3db289760b67bf78418a2809b37b6cc1c92b62bf1341729971407ed"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.685588 4906 generic.go:334] "Generic (PLEG): container finished" podID="c831978a-e34a-46bf-99c4-bfdffd022f43" containerID="76b339b33a0b28bc3f6fa912f3e45da2330cbcbbea668116e53832ff3901ea61" exitCode=0 Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.685670 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-75hmz" event={"ID":"c831978a-e34a-46bf-99c4-bfdffd022f43","Type":"ContainerDied","Data":"76b339b33a0b28bc3f6fa912f3e45da2330cbcbbea668116e53832ff3901ea61"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.691250 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.724936 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9692-account-create-update-c6k95" event={"ID":"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82","Type":"ContainerStarted","Data":"ebc6421b861a9164fd06622c9f90613ca130d8585055607e360ca2425d491e72"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.724993 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9692-account-create-update-c6k95" event={"ID":"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82","Type":"ContainerStarted","Data":"701a0ca4213b2e9bf646fa4e289703b1030a90a092e36aebb6c1efa75f08f34c"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.735267 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-cjk9b" event={"ID":"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b","Type":"ContainerStarted","Data":"57f6e116ea68d4f543cf76acc81ae356dabee8bdd94c8cc1aed1863792fc3c4a"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.760995 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0b66-account-create-update-t85cr" podStartSLOduration=2.7609594 podStartE2EDuration="2.7609594s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:01.699470787 +0000 UTC m=+1420.093872397" watchObservedRunningTime="2026-02-27 08:52:01.7609594 +0000 UTC m=+1420.155361020" Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.768255 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" event={"ID":"f0033485-16b3-4a98-a7c2-588f320dddee","Type":"ContainerStarted","Data":"6652a95af235e3a4a8f6620c19b67cdf73bd02906ec940b0b60cc36e348e4c1c"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.829698 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5hsm6" event={"ID":"9b86e3b5-b8e0-48ce-8903-4dc93860723f","Type":"ContainerStarted","Data":"b722e746a29621b0ed9453587181d6dc0090cef7cddbd963e27e0510f68a724a"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.830002 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5hsm6" event={"ID":"9b86e3b5-b8e0-48ce-8903-4dc93860723f","Type":"ContainerStarted","Data":"dc20a46328b3ff249d19178c385f58f8e772f4c45c4bde4e852cb5bbf5d5fc56"} Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.854814 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-infra/auto-csr-approver-29536372-clhfq"] Feb 27 08:52:01 crc kubenswrapper[4906]: I0227 08:52:01.882377 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-5hsm6" podStartSLOduration=2.8823521640000003 podStartE2EDuration="2.882352164s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:01.86845992 +0000 UTC m=+1420.262861530" watchObservedRunningTime="2026-02-27 08:52:01.882352164 +0000 UTC m=+1420.276753774" Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.199923 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.856253 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38","Type":"ContainerStarted","Data":"47780919e1f0a0b55a8bca577ba660ef681d45d4c9176c5cbad82216f3177d91"} Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.864273 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536372-clhfq" event={"ID":"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d","Type":"ContainerStarted","Data":"bcea8b771e8513f7cfd2bf608619f865ccd152bed2e529004187ddeccb67ae3a"} Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.868872 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"16a6f839-050d-49e8-b788-3ff1f5e46329","Type":"ContainerStarted","Data":"05a1cd093915b2d8d100e34ec22e253cdef70610e07f757899b0638c1bdf6fe7"} Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.871408 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"4503410a-77c1-4da7-b599-f6746affaaf8","Type":"ContainerStarted","Data":"cd2595a5f43f3dadfec43284c957fc4e646bfa9041945fbcebea8e3f479df528"} Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.874402 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-cjk9b" event={"ID":"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b","Type":"ContainerStarted","Data":"13547733979dada0ef59022f262dc14d07f4f903a24002918bf71775551085cb"} Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.876619 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" event={"ID":"f0033485-16b3-4a98-a7c2-588f320dddee","Type":"ContainerStarted","Data":"f9cbe66b3d932a3f7ec3d208f69f8d28b80515d8665e45108857c970c1886402"} Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.924751 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=9.924724226 podStartE2EDuration="9.924724226s" podCreationTimestamp="2026-02-27 08:51:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:02.896848155 +0000 UTC m=+1421.291249785" watchObservedRunningTime="2026-02-27 08:52:02.924724226 +0000 UTC m=+1421.319125836" Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 
08:52:02.929861 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" podStartSLOduration=3.9298364599999998 podStartE2EDuration="3.92983646s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:02.921026549 +0000 UTC m=+1421.315428179" watchObservedRunningTime="2026-02-27 08:52:02.92983646 +0000 UTC m=+1421.324238070" Feb 27 08:52:02 crc kubenswrapper[4906]: I0227 08:52:02.943814 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-9692-account-create-update-c6k95" podStartSLOduration=3.943783086 podStartE2EDuration="3.943783086s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:02.941130006 +0000 UTC m=+1421.335531636" watchObservedRunningTime="2026-02-27 08:52:02.943783086 +0000 UTC m=+1421.338184696" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.455306 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-75hmz" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.622240 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c831978a-e34a-46bf-99c4-bfdffd022f43-operator-scripts\") pod \"c831978a-e34a-46bf-99c4-bfdffd022f43\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.623472 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c831978a-e34a-46bf-99c4-bfdffd022f43-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c831978a-e34a-46bf-99c4-bfdffd022f43" (UID: "c831978a-e34a-46bf-99c4-bfdffd022f43"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.623831 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8zgkm\" (UniqueName: \"kubernetes.io/projected/c831978a-e34a-46bf-99c4-bfdffd022f43-kube-api-access-8zgkm\") pod \"c831978a-e34a-46bf-99c4-bfdffd022f43\" (UID: \"c831978a-e34a-46bf-99c4-bfdffd022f43\") " Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.626344 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c831978a-e34a-46bf-99c4-bfdffd022f43-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.628972 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c831978a-e34a-46bf-99c4-bfdffd022f43-kube-api-access-8zgkm" (OuterVolumeSpecName: "kube-api-access-8zgkm") pod "c831978a-e34a-46bf-99c4-bfdffd022f43" (UID: "c831978a-e34a-46bf-99c4-bfdffd022f43"). InnerVolumeSpecName "kube-api-access-8zgkm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.732240 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8zgkm\" (UniqueName: \"kubernetes.io/projected/c831978a-e34a-46bf-99c4-bfdffd022f43-kube-api-access-8zgkm\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.787294 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.787374 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.828718 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.838873 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.899931 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-75hmz" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.899989 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-75hmz" event={"ID":"c831978a-e34a-46bf-99c4-bfdffd022f43","Type":"ContainerDied","Data":"baa54e097fe7d7785651941531c4488452b61d240531263cebe2a89956721c5c"} Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.900435 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="baa54e097fe7d7785651941531c4488452b61d240531263cebe2a89956721c5c" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.903319 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38","Type":"ContainerStarted","Data":"fca3a3c7907144ae3245924a6874ad482b60e8bfb5940539cc8b47a085b8395a"} Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.904027 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.904055 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Feb 27 08:52:03 crc kubenswrapper[4906]: I0227 08:52:03.941283 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-db-create-cjk9b" podStartSLOduration=4.94125804 podStartE2EDuration="4.94125804s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:03.918941084 +0000 UTC m=+1422.313342694" watchObservedRunningTime="2026-02-27 08:52:03.94125804 +0000 UTC m=+1422.335659650" Feb 27 08:52:04 crc kubenswrapper[4906]: E0227 08:52:04.255375 4906 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b86e3b5_b8e0_48ce_8903_4dc93860723f.slice/crio-conmon-b722e746a29621b0ed9453587181d6dc0090cef7cddbd963e27e0510f68a724a.scope\": RecentStats: unable to find data in memory cache]" Feb 27 08:52:04 crc kubenswrapper[4906]: I0227 08:52:04.925405 4906 generic.go:334] "Generic (PLEG): 
container finished" podID="9b86e3b5-b8e0-48ce-8903-4dc93860723f" containerID="b722e746a29621b0ed9453587181d6dc0090cef7cddbd963e27e0510f68a724a" exitCode=0 Feb 27 08:52:04 crc kubenswrapper[4906]: I0227 08:52:04.925427 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5hsm6" event={"ID":"9b86e3b5-b8e0-48ce-8903-4dc93860723f","Type":"ContainerDied","Data":"b722e746a29621b0ed9453587181d6dc0090cef7cddbd963e27e0510f68a724a"} Feb 27 08:52:05 crc kubenswrapper[4906]: I0227 08:52:05.959663 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerStarted","Data":"50c389a4f7ac83a1d44ce56c9fd2c7d9c946bc8102f7c258fa461296bbd331ca"} Feb 27 08:52:05 crc kubenswrapper[4906]: I0227 08:52:05.966833 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"16a6f839-050d-49e8-b788-3ff1f5e46329","Type":"ContainerStarted","Data":"9725bcd473f62c1e128c5481354656c8b5915da3af96289ec42faff94efe10ad"} Feb 27 08:52:05 crc kubenswrapper[4906]: I0227 08:52:05.970578 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38","Type":"ContainerStarted","Data":"4ac7c7a53d4e420665853d4b174fbc021b14b271f344f8d37368d7fb98cd6303"} Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.002478 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=8.002453356 podStartE2EDuration="8.002453356s" podCreationTimestamp="2026-02-27 08:51:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:05.994471667 +0000 UTC m=+1424.388873287" watchObservedRunningTime="2026-02-27 08:52:06.002453356 +0000 UTC m=+1424.396854956" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.045912 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=7.045869755 podStartE2EDuration="7.045869755s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:06.026325743 +0000 UTC m=+1424.420727383" watchObservedRunningTime="2026-02-27 08:52:06.045869755 +0000 UTC m=+1424.440271365" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.519920 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.613847 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz9lv\" (UniqueName: \"kubernetes.io/projected/9b86e3b5-b8e0-48ce-8903-4dc93860723f-kube-api-access-hz9lv\") pod \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.614934 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b86e3b5-b8e0-48ce-8903-4dc93860723f-operator-scripts\") pod \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\" (UID: \"9b86e3b5-b8e0-48ce-8903-4dc93860723f\") " Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.615561 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b86e3b5-b8e0-48ce-8903-4dc93860723f-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9b86e3b5-b8e0-48ce-8903-4dc93860723f" (UID: "9b86e3b5-b8e0-48ce-8903-4dc93860723f"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.621017 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9b86e3b5-b8e0-48ce-8903-4dc93860723f-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.624237 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b86e3b5-b8e0-48ce-8903-4dc93860723f-kube-api-access-hz9lv" (OuterVolumeSpecName: "kube-api-access-hz9lv") pod "9b86e3b5-b8e0-48ce-8903-4dc93860723f" (UID: "9b86e3b5-b8e0-48ce-8903-4dc93860723f"). InnerVolumeSpecName "kube-api-access-hz9lv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.723723 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz9lv\" (UniqueName: \"kubernetes.io/projected/9b86e3b5-b8e0-48ce-8903-4dc93860723f-kube-api-access-hz9lv\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.991097 4906 generic.go:334] "Generic (PLEG): container finished" podID="d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82" containerID="ebc6421b861a9164fd06622c9f90613ca130d8585055607e360ca2425d491e72" exitCode=0 Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.991163 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9692-account-create-update-c6k95" event={"ID":"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82","Type":"ContainerDied","Data":"ebc6421b861a9164fd06622c9f90613ca130d8585055607e360ca2425d491e72"} Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.994355 4906 generic.go:334] "Generic (PLEG): container finished" podID="6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b" containerID="13547733979dada0ef59022f262dc14d07f4f903a24002918bf71775551085cb" exitCode=0 Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.994396 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-cjk9b" event={"ID":"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b","Type":"ContainerDied","Data":"13547733979dada0ef59022f262dc14d07f4f903a24002918bf71775551085cb"} Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.997544 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-5hsm6" Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.997964 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-5hsm6" event={"ID":"9b86e3b5-b8e0-48ce-8903-4dc93860723f","Type":"ContainerDied","Data":"dc20a46328b3ff249d19178c385f58f8e772f4c45c4bde4e852cb5bbf5d5fc56"} Feb 27 08:52:06 crc kubenswrapper[4906]: I0227 08:52:06.997999 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc20a46328b3ff249d19178c385f58f8e772f4c45c4bde4e852cb5bbf5d5fc56" Feb 27 08:52:07 crc kubenswrapper[4906]: I0227 08:52:07.685198 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 27 08:52:07 crc kubenswrapper[4906]: I0227 08:52:07.705337 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.009735 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536372-clhfq" event={"ID":"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d","Type":"ContainerStarted","Data":"3b839e8657d267f2171c4b62f23b5996cb039719c6933b1c1a508b488bff636f"} Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.606128 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.718768 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.797537 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-operator-scripts\") pod \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.797935 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jk6rp\" (UniqueName: \"kubernetes.io/projected/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-kube-api-access-jk6rp\") pod \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\" (UID: \"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82\") " Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.798835 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82" (UID: "d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.809040 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-kube-api-access-jk6rp" (OuterVolumeSpecName: "kube-api-access-jk6rp") pod "d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82" (UID: "d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82"). InnerVolumeSpecName "kube-api-access-jk6rp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.900377 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-operator-scripts\") pod \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.900647 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpkxd\" (UniqueName: \"kubernetes.io/projected/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-kube-api-access-xpkxd\") pod \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\" (UID: \"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b\") " Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.901020 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b" (UID: "6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.901437 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.901461 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jk6rp\" (UniqueName: \"kubernetes.io/projected/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82-kube-api-access-jk6rp\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.901475 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:08 crc kubenswrapper[4906]: I0227 08:52:08.904163 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-kube-api-access-xpkxd" (OuterVolumeSpecName: "kube-api-access-xpkxd") pod "6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b" (UID: "6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b"). InnerVolumeSpecName "kube-api-access-xpkxd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.003662 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpkxd\" (UniqueName: \"kubernetes.io/projected/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b-kube-api-access-xpkxd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.021838 4906 generic.go:334] "Generic (PLEG): container finished" podID="1d2a80d9-824f-416e-a035-de905d169e5d" containerID="9d0e8a0faa98f4bbb52fddfbdb315807fc911355169f50a0567651ff92cf2f30" exitCode=0 Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.021935 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0b66-account-create-update-t85cr" event={"ID":"1d2a80d9-824f-416e-a035-de905d169e5d","Type":"ContainerDied","Data":"9d0e8a0faa98f4bbb52fddfbdb315807fc911355169f50a0567651ff92cf2f30"} Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.024955 4906 generic.go:334] "Generic (PLEG): container finished" podID="2ba4b224-b847-4ce9-a4f4-52fa5001cb2d" containerID="3b839e8657d267f2171c4b62f23b5996cb039719c6933b1c1a508b488bff636f" exitCode=0 Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.025022 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536372-clhfq" event={"ID":"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d","Type":"ContainerDied","Data":"3b839e8657d267f2171c4b62f23b5996cb039719c6933b1c1a508b488bff636f"} Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.027488 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerStarted","Data":"06c781a656bcf258af2a15e81ebac37f2647cff8973ebe0605fe5309e50340ab"} Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.029487 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-cjk9b" event={"ID":"6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b","Type":"ContainerDied","Data":"57f6e116ea68d4f543cf76acc81ae356dabee8bdd94c8cc1aed1863792fc3c4a"} Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.029516 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-cjk9b" Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.029536 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57f6e116ea68d4f543cf76acc81ae356dabee8bdd94c8cc1aed1863792fc3c4a" Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.031608 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-9692-account-create-update-c6k95" event={"ID":"d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82","Type":"ContainerDied","Data":"701a0ca4213b2e9bf646fa4e289703b1030a90a092e36aebb6c1efa75f08f34c"} Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.031632 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="701a0ca4213b2e9bf646fa4e289703b1030a90a092e36aebb6c1efa75f08f34c" Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.031662 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-9692-account-create-update-c6k95" Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.033763 4906 generic.go:334] "Generic (PLEG): container finished" podID="f0033485-16b3-4a98-a7c2-588f320dddee" containerID="f9cbe66b3d932a3f7ec3d208f69f8d28b80515d8665e45108857c970c1886402" exitCode=0 Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.033809 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" event={"ID":"f0033485-16b3-4a98-a7c2-588f320dddee","Type":"ContainerDied","Data":"f9cbe66b3d932a3f7ec3d208f69f8d28b80515d8665e45108857c970c1886402"} Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.430809 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Feb 27 08:52:09 crc kubenswrapper[4906]: I0227 08:52:09.690954 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.041609 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.046542 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerStarted","Data":"4840d6aad169d87b8df957df5a6e9e0d01f85a7a51d8f99a35de5056ff5f8167"} Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.356056 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.356126 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.400985 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.472258 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.685295 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.803026 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536372-clhfq" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.809214 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.852709 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d2a80d9-824f-416e-a035-de905d169e5d-operator-scripts\") pod \"1d2a80d9-824f-416e-a035-de905d169e5d\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.852953 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nvjdv\" (UniqueName: \"kubernetes.io/projected/1d2a80d9-824f-416e-a035-de905d169e5d-kube-api-access-nvjdv\") pod \"1d2a80d9-824f-416e-a035-de905d169e5d\" (UID: \"1d2a80d9-824f-416e-a035-de905d169e5d\") " Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.858162 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d2a80d9-824f-416e-a035-de905d169e5d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1d2a80d9-824f-416e-a035-de905d169e5d" (UID: "1d2a80d9-824f-416e-a035-de905d169e5d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.879367 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d2a80d9-824f-416e-a035-de905d169e5d-kube-api-access-nvjdv" (OuterVolumeSpecName: "kube-api-access-nvjdv") pod "1d2a80d9-824f-416e-a035-de905d169e5d" (UID: "1d2a80d9-824f-416e-a035-de905d169e5d"). InnerVolumeSpecName "kube-api-access-nvjdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.955282 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0033485-16b3-4a98-a7c2-588f320dddee-operator-scripts\") pod \"f0033485-16b3-4a98-a7c2-588f320dddee\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.955578 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flqvf\" (UniqueName: \"kubernetes.io/projected/f0033485-16b3-4a98-a7c2-588f320dddee-kube-api-access-flqvf\") pod \"f0033485-16b3-4a98-a7c2-588f320dddee\" (UID: \"f0033485-16b3-4a98-a7c2-588f320dddee\") " Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.955684 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rxddz\" (UniqueName: \"kubernetes.io/projected/2ba4b224-b847-4ce9-a4f4-52fa5001cb2d-kube-api-access-rxddz\") pod \"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d\" (UID: \"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d\") " Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.955873 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f0033485-16b3-4a98-a7c2-588f320dddee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f0033485-16b3-4a98-a7c2-588f320dddee" (UID: "f0033485-16b3-4a98-a7c2-588f320dddee"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.956282 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f0033485-16b3-4a98-a7c2-588f320dddee-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.956301 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nvjdv\" (UniqueName: \"kubernetes.io/projected/1d2a80d9-824f-416e-a035-de905d169e5d-kube-api-access-nvjdv\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.956316 4906 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1d2a80d9-824f-416e-a035-de905d169e5d-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.960438 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0033485-16b3-4a98-a7c2-588f320dddee-kube-api-access-flqvf" (OuterVolumeSpecName: "kube-api-access-flqvf") pod "f0033485-16b3-4a98-a7c2-588f320dddee" (UID: "f0033485-16b3-4a98-a7c2-588f320dddee"). InnerVolumeSpecName "kube-api-access-flqvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:10 crc kubenswrapper[4906]: I0227 08:52:10.960844 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ba4b224-b847-4ce9-a4f4-52fa5001cb2d-kube-api-access-rxddz" (OuterVolumeSpecName: "kube-api-access-rxddz") pod "2ba4b224-b847-4ce9-a4f4-52fa5001cb2d" (UID: "2ba4b224-b847-4ce9-a4f4-52fa5001cb2d"). InnerVolumeSpecName "kube-api-access-rxddz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.060447 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flqvf\" (UniqueName: \"kubernetes.io/projected/f0033485-16b3-4a98-a7c2-588f320dddee-kube-api-access-flqvf\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.060479 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rxddz\" (UniqueName: \"kubernetes.io/projected/2ba4b224-b847-4ce9-a4f4-52fa5001cb2d-kube-api-access-rxddz\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.060986 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0b66-account-create-update-t85cr" event={"ID":"1d2a80d9-824f-416e-a035-de905d169e5d","Type":"ContainerDied","Data":"7c6203515d30c7a94fa242d167756598446fce84b42fe4c5ed0720cc0b81c70a"} Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.061031 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0b66-account-create-update-t85cr" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.061057 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c6203515d30c7a94fa242d167756598446fce84b42fe4c5ed0720cc0b81c70a" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.063985 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536372-clhfq" event={"ID":"2ba4b224-b847-4ce9-a4f4-52fa5001cb2d","Type":"ContainerDied","Data":"bcea8b771e8513f7cfd2bf608619f865ccd152bed2e529004187ddeccb67ae3a"} Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.064033 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bcea8b771e8513f7cfd2bf608619f865ccd152bed2e529004187ddeccb67ae3a" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.064145 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536372-clhfq" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.070642 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" event={"ID":"f0033485-16b3-4a98-a7c2-588f320dddee","Type":"ContainerDied","Data":"6652a95af235e3a4a8f6620c19b67cdf73bd02906ec940b0b60cc36e348e4c1c"} Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.070719 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6652a95af235e3a4a8f6620c19b67cdf73bd02906ec940b0b60cc36e348e4c1c" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.070761 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.071945 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5654-account-create-update-ljxcj" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.072757 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.496733 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.569857 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2pxg5\" (UniqueName: \"kubernetes.io/projected/6717d894-ee45-4de0-9c94-5778bf5d9884-kube-api-access-2pxg5\") pod \"6717d894-ee45-4de0-9c94-5778bf5d9884\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.569950 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-scripts\") pod \"6717d894-ee45-4de0-9c94-5778bf5d9884\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.570006 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6717d894-ee45-4de0-9c94-5778bf5d9884-etc-machine-id\") pod \"6717d894-ee45-4de0-9c94-5778bf5d9884\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.570046 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-combined-ca-bundle\") pod \"6717d894-ee45-4de0-9c94-5778bf5d9884\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.570165 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6717d894-ee45-4de0-9c94-5778bf5d9884-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "6717d894-ee45-4de0-9c94-5778bf5d9884" (UID: "6717d894-ee45-4de0-9c94-5778bf5d9884"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.570193 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6717d894-ee45-4de0-9c94-5778bf5d9884-logs\") pod \"6717d894-ee45-4de0-9c94-5778bf5d9884\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.570537 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data\") pod \"6717d894-ee45-4de0-9c94-5778bf5d9884\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.570585 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data-custom\") pod \"6717d894-ee45-4de0-9c94-5778bf5d9884\" (UID: \"6717d894-ee45-4de0-9c94-5778bf5d9884\") " Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.571766 4906 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/6717d894-ee45-4de0-9c94-5778bf5d9884-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.587449 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-scripts" (OuterVolumeSpecName: "scripts") pod "6717d894-ee45-4de0-9c94-5778bf5d9884" (UID: "6717d894-ee45-4de0-9c94-5778bf5d9884"). 
InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.590378 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6717d894-ee45-4de0-9c94-5778bf5d9884-logs" (OuterVolumeSpecName: "logs") pod "6717d894-ee45-4de0-9c94-5778bf5d9884" (UID: "6717d894-ee45-4de0-9c94-5778bf5d9884"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.631472 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6717d894-ee45-4de0-9c94-5778bf5d9884-kube-api-access-2pxg5" (OuterVolumeSpecName: "kube-api-access-2pxg5") pod "6717d894-ee45-4de0-9c94-5778bf5d9884" (UID: "6717d894-ee45-4de0-9c94-5778bf5d9884"). InnerVolumeSpecName "kube-api-access-2pxg5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.633188 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "6717d894-ee45-4de0-9c94-5778bf5d9884" (UID: "6717d894-ee45-4de0-9c94-5778bf5d9884"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.678097 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6717d894-ee45-4de0-9c94-5778bf5d9884" (UID: "6717d894-ee45-4de0-9c94-5778bf5d9884"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.682439 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6717d894-ee45-4de0-9c94-5778bf5d9884-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.682477 4906 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.682488 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2pxg5\" (UniqueName: \"kubernetes.io/projected/6717d894-ee45-4de0-9c94-5778bf5d9884-kube-api-access-2pxg5\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.682499 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.682508 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.743213 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data" (OuterVolumeSpecName: "config-data") pod "6717d894-ee45-4de0-9c94-5778bf5d9884" (UID: "6717d894-ee45-4de0-9c94-5778bf5d9884"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:11 crc kubenswrapper[4906]: I0227 08:52:11.820708 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6717d894-ee45-4de0-9c94-5778bf5d9884-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.008943 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536366-bgvjq"] Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.019255 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536366-bgvjq"] Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.082104 4906 generic.go:334] "Generic (PLEG): container finished" podID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerID="71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c" exitCode=137 Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.082172 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6717d894-ee45-4de0-9c94-5778bf5d9884","Type":"ContainerDied","Data":"71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c"} Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.082250 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"6717d894-ee45-4de0-9c94-5778bf5d9884","Type":"ContainerDied","Data":"b9c1978c70115178be2895edd6f3f1877206ac33e78045196d44981001a553fa"} Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.082272 4906 scope.go:117] "RemoveContainer" containerID="71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.082283 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.115810 4906 scope.go:117] "RemoveContainer" containerID="fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.128989 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.142136 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.146941 4906 scope.go:117] "RemoveContainer" containerID="71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.148011 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c\": container with ID starting with 71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c not found: ID does not exist" containerID="71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.148082 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c"} err="failed to get container status \"71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c\": rpc error: code = NotFound desc = could not find container \"71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c\": container with ID starting with 71bd2eacdf55ef5b7ab0cdc007f0e942baa98c45afd80ce0ee5f7a87823c688c not found: ID does not exist" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.148120 4906 scope.go:117] "RemoveContainer" containerID="fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.152325 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66\": container with ID starting with fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66 not found: ID does not exist" containerID="fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.152394 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66"} err="failed to get container status \"fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66\": rpc error: code = NotFound desc = could not find container \"fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66\": container with ID starting with fedd178614a8e52faadf3967d5eb0cddf6b0de85fc06a858266f0b94ac46df66 not found: ID does not exist" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170008 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170712 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170744 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b" 
containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170762 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0033485-16b3-4a98-a7c2-588f320dddee" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170773 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0033485-16b3-4a98-a7c2-588f320dddee" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170801 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b86e3b5-b8e0-48ce-8903-4dc93860723f" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170810 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b86e3b5-b8e0-48ce-8903-4dc93860723f" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170821 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d2a80d9-824f-416e-a035-de905d169e5d" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170830 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d2a80d9-824f-416e-a035-de905d169e5d" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170848 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170856 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170896 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ba4b224-b847-4ce9-a4f4-52fa5001cb2d" containerName="oc" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170906 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ba4b224-b847-4ce9-a4f4-52fa5001cb2d" containerName="oc" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170918 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c831978a-e34a-46bf-99c4-bfdffd022f43" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170926 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="c831978a-e34a-46bf-99c4-bfdffd022f43" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170940 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170947 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api" Feb 27 08:52:12 crc kubenswrapper[4906]: E0227 08:52:12.170964 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api-log" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.170972 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api-log" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171226 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d2a80d9-824f-416e-a035-de905d169e5d" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: 
I0227 08:52:12.171245 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0033485-16b3-4a98-a7c2-588f320dddee" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171263 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171273 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b86e3b5-b8e0-48ce-8903-4dc93860723f" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171285 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ba4b224-b847-4ce9-a4f4-52fa5001cb2d" containerName="oc" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171303 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171318 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="c831978a-e34a-46bf-99c4-bfdffd022f43" containerName="mariadb-database-create" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171328 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82" containerName="mariadb-account-create-update" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.171338 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" containerName="cinder-api-log" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.172952 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.179535 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.183717 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.186439 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.187826 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.202644 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.340172 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-config-data\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.340311 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-scripts\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" 
Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.340427 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdmfh\" (UniqueName: \"kubernetes.io/projected/00396512-9757-4e1d-b801-074ac259bab9-kube-api-access-bdmfh\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.340464 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.340495 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-public-tls-certs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.340599 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/00396512-9757-4e1d-b801-074ac259bab9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.340921 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.341049 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00396512-9757-4e1d-b801-074ac259bab9-logs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.341100 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-config-data-custom\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.443623 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.443767 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/00396512-9757-4e1d-b801-074ac259bab9-logs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.444303 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/00396512-9757-4e1d-b801-074ac259bab9-logs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.444470 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-config-data-custom\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.444957 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-config-data\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.445065 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-scripts\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.445682 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdmfh\" (UniqueName: \"kubernetes.io/projected/00396512-9757-4e1d-b801-074ac259bab9-kube-api-access-bdmfh\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.445756 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.445811 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-public-tls-certs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.445912 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/00396512-9757-4e1d-b801-074ac259bab9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.446084 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/00396512-9757-4e1d-b801-074ac259bab9-etc-machine-id\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.450820 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.451381 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-config-data-custom\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.451849 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.453677 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-scripts\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.454470 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-public-tls-certs\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.461799 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/00396512-9757-4e1d-b801-074ac259bab9-config-data\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.467687 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdmfh\" (UniqueName: \"kubernetes.io/projected/00396512-9757-4e1d-b801-074ac259bab9-kube-api-access-bdmfh\") pod \"cinder-api-0\" (UID: \"00396512-9757-4e1d-b801-074ac259bab9\") " pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.497600 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.603647 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6717d894-ee45-4de0-9c94-5778bf5d9884" path="/var/lib/kubelet/pods/6717d894-ee45-4de0-9c94-5778bf5d9884/volumes" Feb 27 08:52:12 crc kubenswrapper[4906]: I0227 08:52:12.604487 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4c4a66a-849a-4920-b332-4b6dacbec98b" path="/var/lib/kubelet/pods/f4c4a66a-849a-4920-b332-4b6dacbec98b/volumes" Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.099139 4906 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.099466 4906 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.100033 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="proxy-httpd" containerID="cri-o://425d5a9fc45f8fff7facd62207d6e8763a1cc680a36f71e53b89361e342f6a53" gracePeriod=30 Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.100168 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="sg-core" containerID="cri-o://4840d6aad169d87b8df957df5a6e9e0d01f85a7a51d8f99a35de5056ff5f8167" gracePeriod=30 Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.100254 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-notification-agent" containerID="cri-o://06c781a656bcf258af2a15e81ebac37f2647cff8973ebe0605fe5309e50340ab" gracePeriod=30 Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.100488 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerStarted","Data":"425d5a9fc45f8fff7facd62207d6e8763a1cc680a36f71e53b89361e342f6a53"} Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.100564 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.099353 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-central-agent" containerID="cri-o://50c389a4f7ac83a1d44ce56c9fd2c7d9c946bc8102f7c258fa461296bbd331ca" gracePeriod=30 Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.136344 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.049646964 podStartE2EDuration="14.136320225s" podCreationTimestamp="2026-02-27 08:51:59 +0000 UTC" firstStartedPulling="2026-02-27 08:52:01.45942089 +0000 UTC m=+1419.853822500" lastFinishedPulling="2026-02-27 08:52:11.546094151 +0000 UTC m=+1429.940495761" observedRunningTime="2026-02-27 08:52:13.127781391 +0000 UTC m=+1431.522183011" watchObservedRunningTime="2026-02-27 08:52:13.136320225 +0000 UTC m=+1431.530721835" Feb 27 08:52:13 crc kubenswrapper[4906]: I0227 08:52:13.176163 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.116754 4906 generic.go:334] "Generic 
(PLEG): container finished" podID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerID="425d5a9fc45f8fff7facd62207d6e8763a1cc680a36f71e53b89361e342f6a53" exitCode=0 Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.117435 4906 generic.go:334] "Generic (PLEG): container finished" podID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerID="4840d6aad169d87b8df957df5a6e9e0d01f85a7a51d8f99a35de5056ff5f8167" exitCode=2 Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.117446 4906 generic.go:334] "Generic (PLEG): container finished" podID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerID="06c781a656bcf258af2a15e81ebac37f2647cff8973ebe0605fe5309e50340ab" exitCode=0 Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.117456 4906 generic.go:334] "Generic (PLEG): container finished" podID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerID="50c389a4f7ac83a1d44ce56c9fd2c7d9c946bc8102f7c258fa461296bbd331ca" exitCode=0 Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.117516 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerDied","Data":"425d5a9fc45f8fff7facd62207d6e8763a1cc680a36f71e53b89361e342f6a53"} Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.117549 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerDied","Data":"4840d6aad169d87b8df957df5a6e9e0d01f85a7a51d8f99a35de5056ff5f8167"} Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.117562 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerDied","Data":"06c781a656bcf258af2a15e81ebac37f2647cff8973ebe0605fe5309e50340ab"} Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.117572 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerDied","Data":"50c389a4f7ac83a1d44ce56c9fd2c7d9c946bc8102f7c258fa461296bbd331ca"} Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.120128 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"00396512-9757-4e1d-b801-074ac259bab9","Type":"ContainerStarted","Data":"e463dd0ebe58dac38206b1255c3613170cdbaced2880f99b2f8fa6feeb2e9a5a"} Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.120195 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"00396512-9757-4e1d-b801-074ac259bab9","Type":"ContainerStarted","Data":"722eec677ce33615d8fdadf18879e006a6700f626238f3fe5dad5233ab54e674"} Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.392835 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.393639 4906 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.447366 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.703375 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-p544r"] Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.705812 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.709568 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.710016 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-ljsrk" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.710183 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.767549 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-p544r"] Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.828102 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-config-data\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.828155 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48tfw\" (UniqueName: \"kubernetes.io/projected/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-kube-api-access-48tfw\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.828223 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-scripts\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.828276 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.930394 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-scripts\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.930489 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.930565 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-config-data\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: 
\"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.930588 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48tfw\" (UniqueName: \"kubernetes.io/projected/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-kube-api-access-48tfw\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.950725 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-scripts\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.950923 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-config-data\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.954964 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:14 crc kubenswrapper[4906]: I0227 08:52:14.961558 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48tfw\" (UniqueName: \"kubernetes.io/projected/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-kube-api-access-48tfw\") pod \"nova-cell0-conductor-db-sync-p544r\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.077420 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.134287 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.163569 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.163738 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"632dcbd3-7f39-484b-92db-ad2f1b574361","Type":"ContainerDied","Data":"4845f1f456d42eb22c75ede21789383b9a86bc864b0cb1409cc45f76be85339c"} Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.163843 4906 scope.go:117] "RemoveContainer" containerID="425d5a9fc45f8fff7facd62207d6e8763a1cc680a36f71e53b89361e342f6a53" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.237253 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-combined-ca-bundle\") pod \"632dcbd3-7f39-484b-92db-ad2f1b574361\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.237310 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-sg-core-conf-yaml\") pod \"632dcbd3-7f39-484b-92db-ad2f1b574361\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.237364 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-run-httpd\") pod \"632dcbd3-7f39-484b-92db-ad2f1b574361\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.237387 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-plw85\" (UniqueName: \"kubernetes.io/projected/632dcbd3-7f39-484b-92db-ad2f1b574361-kube-api-access-plw85\") pod \"632dcbd3-7f39-484b-92db-ad2f1b574361\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.237584 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-scripts\") pod \"632dcbd3-7f39-484b-92db-ad2f1b574361\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.237610 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-log-httpd\") pod \"632dcbd3-7f39-484b-92db-ad2f1b574361\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.237653 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-config-data\") pod \"632dcbd3-7f39-484b-92db-ad2f1b574361\" (UID: \"632dcbd3-7f39-484b-92db-ad2f1b574361\") " Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.250163 4906 scope.go:117] "RemoveContainer" containerID="4840d6aad169d87b8df957df5a6e9e0d01f85a7a51d8f99a35de5056ff5f8167" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.261778 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "632dcbd3-7f39-484b-92db-ad2f1b574361" (UID: "632dcbd3-7f39-484b-92db-ad2f1b574361"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.262747 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/632dcbd3-7f39-484b-92db-ad2f1b574361-kube-api-access-plw85" (OuterVolumeSpecName: "kube-api-access-plw85") pod "632dcbd3-7f39-484b-92db-ad2f1b574361" (UID: "632dcbd3-7f39-484b-92db-ad2f1b574361"). InnerVolumeSpecName "kube-api-access-plw85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.265356 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "632dcbd3-7f39-484b-92db-ad2f1b574361" (UID: "632dcbd3-7f39-484b-92db-ad2f1b574361"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.270216 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-scripts" (OuterVolumeSpecName: "scripts") pod "632dcbd3-7f39-484b-92db-ad2f1b574361" (UID: "632dcbd3-7f39-484b-92db-ad2f1b574361"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.287055 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "632dcbd3-7f39-484b-92db-ad2f1b574361" (UID: "632dcbd3-7f39-484b-92db-ad2f1b574361"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.339936 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.340004 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.340022 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.340037 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/632dcbd3-7f39-484b-92db-ad2f1b574361-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.340048 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-plw85\" (UniqueName: \"kubernetes.io/projected/632dcbd3-7f39-484b-92db-ad2f1b574361-kube-api-access-plw85\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.344665 4906 scope.go:117] "RemoveContainer" containerID="06c781a656bcf258af2a15e81ebac37f2647cff8973ebe0605fe5309e50340ab" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.371512 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-combined-ca-bundle" (OuterVolumeSpecName: 
"combined-ca-bundle") pod "632dcbd3-7f39-484b-92db-ad2f1b574361" (UID: "632dcbd3-7f39-484b-92db-ad2f1b574361"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.391763 4906 scope.go:117] "RemoveContainer" containerID="50c389a4f7ac83a1d44ce56c9fd2c7d9c946bc8102f7c258fa461296bbd331ca" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.392727 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-config-data" (OuterVolumeSpecName: "config-data") pod "632dcbd3-7f39-484b-92db-ad2f1b574361" (UID: "632dcbd3-7f39-484b-92db-ad2f1b574361"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.442888 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.443288 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632dcbd3-7f39-484b-92db-ad2f1b574361-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.511254 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.539218 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550071 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:15 crc kubenswrapper[4906]: E0227 08:52:15.550538 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-central-agent" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550562 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-central-agent" Feb 27 08:52:15 crc kubenswrapper[4906]: E0227 08:52:15.550579 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="proxy-httpd" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550588 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="proxy-httpd" Feb 27 08:52:15 crc kubenswrapper[4906]: E0227 08:52:15.550602 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-notification-agent" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550609 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-notification-agent" Feb 27 08:52:15 crc kubenswrapper[4906]: E0227 08:52:15.550621 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="sg-core" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550627 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="sg-core" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550806 4906 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="sg-core" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550826 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="proxy-httpd" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550836 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-central-agent" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.550845 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" containerName="ceilometer-notification-agent" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.558984 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.564506 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.565007 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.595965 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.648480 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.648561 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-config-data\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.648669 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-run-httpd\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.648780 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-scripts\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.648804 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.648842 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gh95l\" (UniqueName: \"kubernetes.io/projected/425b549f-2862-4fc5-9d93-87d6c7973041-kube-api-access-gh95l\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") 
" pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.648871 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-log-httpd\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.750303 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-run-httpd\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.750438 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-scripts\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.750468 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.750511 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gh95l\" (UniqueName: \"kubernetes.io/projected/425b549f-2862-4fc5-9d93-87d6c7973041-kube-api-access-gh95l\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.750544 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-log-httpd\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.750584 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.750617 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-config-data\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.751312 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-log-httpd\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.751570 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-run-httpd\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 
27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.758039 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-scripts\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.759612 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.761728 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.770794 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-config-data\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.771278 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gh95l\" (UniqueName: \"kubernetes.io/projected/425b549f-2862-4fc5-9d93-87d6c7973041-kube-api-access-gh95l\") pod \"ceilometer-0\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " pod="openstack/ceilometer-0" Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.800508 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-p544r"] Feb 27 08:52:15 crc kubenswrapper[4906]: I0227 08:52:15.887457 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:16 crc kubenswrapper[4906]: I0227 08:52:16.177726 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"00396512-9757-4e1d-b801-074ac259bab9","Type":"ContainerStarted","Data":"002d1e5e817df5cb820c9ad748ea4b32c08edf7dbf980af97a414336af18708a"} Feb 27 08:52:16 crc kubenswrapper[4906]: I0227 08:52:16.178385 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Feb 27 08:52:16 crc kubenswrapper[4906]: I0227 08:52:16.185323 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-p544r" event={"ID":"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b","Type":"ContainerStarted","Data":"d34bc66c9b33f1ea429a866c4537c63fbb6b718fdd52fabeb6c3b22cf002cee7"} Feb 27 08:52:16 crc kubenswrapper[4906]: I0227 08:52:16.204259 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.204233048 podStartE2EDuration="4.204233048s" podCreationTimestamp="2026-02-27 08:52:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:52:16.201730192 +0000 UTC m=+1434.596131802" watchObservedRunningTime="2026-02-27 08:52:16.204233048 +0000 UTC m=+1434.598634678" Feb 27 08:52:16 crc kubenswrapper[4906]: W0227 08:52:16.401265 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod425b549f_2862_4fc5_9d93_87d6c7973041.slice/crio-783a63fee68387d627196c9a57640e7dafe746e837493190ed046dc82409aa3f WatchSource:0}: Error finding container 783a63fee68387d627196c9a57640e7dafe746e837493190ed046dc82409aa3f: Status 404 returned error can't find the container with id 783a63fee68387d627196c9a57640e7dafe746e837493190ed046dc82409aa3f Feb 27 08:52:16 crc kubenswrapper[4906]: I0227 08:52:16.401807 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:16 crc kubenswrapper[4906]: I0227 08:52:16.566622 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="632dcbd3-7f39-484b-92db-ad2f1b574361" path="/var/lib/kubelet/pods/632dcbd3-7f39-484b-92db-ad2f1b574361/volumes" Feb 27 08:52:17 crc kubenswrapper[4906]: I0227 08:52:17.204576 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerStarted","Data":"783a63fee68387d627196c9a57640e7dafe746e837493190ed046dc82409aa3f"} Feb 27 08:52:19 crc kubenswrapper[4906]: I0227 08:52:19.245602 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerStarted","Data":"75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2"} Feb 27 08:52:19 crc kubenswrapper[4906]: I0227 08:52:19.785000 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:20 crc kubenswrapper[4906]: I0227 08:52:20.259348 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerStarted","Data":"db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab"} Feb 27 08:52:20 crc kubenswrapper[4906]: I0227 08:52:20.259682 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerStarted","Data":"c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf"} Feb 27 08:52:22 crc kubenswrapper[4906]: I0227 08:52:22.200729 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-98c78d5f8-j9wmn" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Feb 27 08:52:22 crc kubenswrapper[4906]: I0227 08:52:22.200899 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:52:25 crc kubenswrapper[4906]: I0227 08:52:25.896776 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Feb 27 08:52:28 crc kubenswrapper[4906]: I0227 08:52:28.351285 4906 generic.go:334] "Generic (PLEG): container finished" podID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerID="ede6b37c237a95667008a06057fd933008e9e665abcee2a3c2a28ce6a6594391" exitCode=137 Feb 27 08:52:28 crc kubenswrapper[4906]: I0227 08:52:28.351379 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-98c78d5f8-j9wmn" event={"ID":"e6ab1c10-b552-4a69-94c7-68280ab7e126","Type":"ContainerDied","Data":"ede6b37c237a95667008a06057fd933008e9e665abcee2a3c2a28ce6a6594391"} Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.370760 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-98c78d5f8-j9wmn" event={"ID":"e6ab1c10-b552-4a69-94c7-68280ab7e126","Type":"ContainerDied","Data":"4642c6624d1f0b126e2ec3628e2760819a678733aece46dc44970c4450c747df"} Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.371126 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4642c6624d1f0b126e2ec3628e2760819a678733aece46dc44970c4450c747df" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.553633 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.708269 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-config-data\") pod \"e6ab1c10-b552-4a69-94c7-68280ab7e126\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.708644 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-scripts\") pod \"e6ab1c10-b552-4a69-94c7-68280ab7e126\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.708706 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-secret-key\") pod \"e6ab1c10-b552-4a69-94c7-68280ab7e126\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.708740 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-combined-ca-bundle\") pod \"e6ab1c10-b552-4a69-94c7-68280ab7e126\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.708779 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbzfz\" (UniqueName: \"kubernetes.io/projected/e6ab1c10-b552-4a69-94c7-68280ab7e126-kube-api-access-vbzfz\") pod \"e6ab1c10-b552-4a69-94c7-68280ab7e126\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.709001 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6ab1c10-b552-4a69-94c7-68280ab7e126-logs\") pod \"e6ab1c10-b552-4a69-94c7-68280ab7e126\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.709048 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-tls-certs\") pod \"e6ab1c10-b552-4a69-94c7-68280ab7e126\" (UID: \"e6ab1c10-b552-4a69-94c7-68280ab7e126\") " Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.710081 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e6ab1c10-b552-4a69-94c7-68280ab7e126-logs" (OuterVolumeSpecName: "logs") pod "e6ab1c10-b552-4a69-94c7-68280ab7e126" (UID: "e6ab1c10-b552-4a69-94c7-68280ab7e126"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.747785 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e6ab1c10-b552-4a69-94c7-68280ab7e126" (UID: "e6ab1c10-b552-4a69-94c7-68280ab7e126"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.748293 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e6ab1c10-b552-4a69-94c7-68280ab7e126-kube-api-access-vbzfz" (OuterVolumeSpecName: "kube-api-access-vbzfz") pod "e6ab1c10-b552-4a69-94c7-68280ab7e126" (UID: "e6ab1c10-b552-4a69-94c7-68280ab7e126"). InnerVolumeSpecName "kube-api-access-vbzfz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.801789 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-scripts" (OuterVolumeSpecName: "scripts") pod "e6ab1c10-b552-4a69-94c7-68280ab7e126" (UID: "e6ab1c10-b552-4a69-94c7-68280ab7e126"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.811607 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-config-data" (OuterVolumeSpecName: "config-data") pod "e6ab1c10-b552-4a69-94c7-68280ab7e126" (UID: "e6ab1c10-b552-4a69-94c7-68280ab7e126"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.813908 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e6ab1c10-b552-4a69-94c7-68280ab7e126-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.827456 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.827840 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e6ab1c10-b552-4a69-94c7-68280ab7e126-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.827937 4906 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.828073 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbzfz\" (UniqueName: \"kubernetes.io/projected/e6ab1c10-b552-4a69-94c7-68280ab7e126-kube-api-access-vbzfz\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.855463 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e6ab1c10-b552-4a69-94c7-68280ab7e126" (UID: "e6ab1c10-b552-4a69-94c7-68280ab7e126"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.913028 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "e6ab1c10-b552-4a69-94c7-68280ab7e126" (UID: "e6ab1c10-b552-4a69-94c7-68280ab7e126"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.930356 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:29 crc kubenswrapper[4906]: I0227 08:52:29.930408 4906 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/e6ab1c10-b552-4a69-94c7-68280ab7e126-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.383570 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-p544r" event={"ID":"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b","Type":"ContainerStarted","Data":"2b2e3e6ca6758894e548e3731051e66d503701c972c66aae0ebb4abf976b6b7a"} Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.387404 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-98c78d5f8-j9wmn" Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.387514 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerStarted","Data":"736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb"} Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.388501 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="ceilometer-central-agent" containerID="cri-o://75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2" gracePeriod=30 Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.388638 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="proxy-httpd" containerID="cri-o://736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb" gracePeriod=30 Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.388683 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="sg-core" containerID="cri-o://db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab" gracePeriod=30 Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.388716 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="ceilometer-notification-agent" containerID="cri-o://c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf" gracePeriod=30 Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.419434 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-p544r" podStartSLOduration=2.921071865 podStartE2EDuration="16.419399042s" podCreationTimestamp="2026-02-27 08:52:14 +0000 UTC" firstStartedPulling="2026-02-27 08:52:15.807622732 +0000 UTC m=+1434.202024352" lastFinishedPulling="2026-02-27 08:52:29.305949909 +0000 UTC m=+1447.700351529" observedRunningTime="2026-02-27 08:52:30.412517731 +0000 UTC m=+1448.806919351" watchObservedRunningTime="2026-02-27 08:52:30.419399042 +0000 UTC m=+1448.813800652" Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.445190 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/horizon-98c78d5f8-j9wmn"] Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.454705 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-98c78d5f8-j9wmn"] Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.471919 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.567701108 podStartE2EDuration="15.4718736s" podCreationTimestamp="2026-02-27 08:52:15 +0000 UTC" firstStartedPulling="2026-02-27 08:52:16.404618141 +0000 UTC m=+1434.799019761" lastFinishedPulling="2026-02-27 08:52:29.308790633 +0000 UTC m=+1447.703192253" observedRunningTime="2026-02-27 08:52:30.462263857 +0000 UTC m=+1448.856665467" watchObservedRunningTime="2026-02-27 08:52:30.4718736 +0000 UTC m=+1448.866275210" Feb 27 08:52:30 crc kubenswrapper[4906]: I0227 08:52:30.566103 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" path="/var/lib/kubelet/pods/e6ab1c10-b552-4a69-94c7-68280ab7e126/volumes" Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.447055 4906 generic.go:334] "Generic (PLEG): container finished" podID="425b549f-2862-4fc5-9d93-87d6c7973041" containerID="736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb" exitCode=0 Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.447165 4906 generic.go:334] "Generic (PLEG): container finished" podID="425b549f-2862-4fc5-9d93-87d6c7973041" containerID="db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab" exitCode=2 Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.447178 4906 generic.go:334] "Generic (PLEG): container finished" podID="425b549f-2862-4fc5-9d93-87d6c7973041" containerID="c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf" exitCode=0 Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.448248 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerDied","Data":"736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb"} Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.448341 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerDied","Data":"db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab"} Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.448360 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerDied","Data":"c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf"} Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.873814 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.978085 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-combined-ca-bundle\") pod \"425b549f-2862-4fc5-9d93-87d6c7973041\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.978212 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-log-httpd\") pod \"425b549f-2862-4fc5-9d93-87d6c7973041\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.978248 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-scripts\") pod \"425b549f-2862-4fc5-9d93-87d6c7973041\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.978275 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-config-data\") pod \"425b549f-2862-4fc5-9d93-87d6c7973041\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.978369 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gh95l\" (UniqueName: \"kubernetes.io/projected/425b549f-2862-4fc5-9d93-87d6c7973041-kube-api-access-gh95l\") pod \"425b549f-2862-4fc5-9d93-87d6c7973041\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.978446 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-sg-core-conf-yaml\") pod \"425b549f-2862-4fc5-9d93-87d6c7973041\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.978591 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-run-httpd\") pod \"425b549f-2862-4fc5-9d93-87d6c7973041\" (UID: \"425b549f-2862-4fc5-9d93-87d6c7973041\") " Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.979760 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "425b549f-2862-4fc5-9d93-87d6c7973041" (UID: "425b549f-2862-4fc5-9d93-87d6c7973041"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.982526 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "425b549f-2862-4fc5-9d93-87d6c7973041" (UID: "425b549f-2862-4fc5-9d93-87d6c7973041"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.991685 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-scripts" (OuterVolumeSpecName: "scripts") pod "425b549f-2862-4fc5-9d93-87d6c7973041" (UID: "425b549f-2862-4fc5-9d93-87d6c7973041"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:31 crc kubenswrapper[4906]: I0227 08:52:31.993128 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/425b549f-2862-4fc5-9d93-87d6c7973041-kube-api-access-gh95l" (OuterVolumeSpecName: "kube-api-access-gh95l") pod "425b549f-2862-4fc5-9d93-87d6c7973041" (UID: "425b549f-2862-4fc5-9d93-87d6c7973041"). InnerVolumeSpecName "kube-api-access-gh95l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.074298 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "425b549f-2862-4fc5-9d93-87d6c7973041" (UID: "425b549f-2862-4fc5-9d93-87d6c7973041"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.083299 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.083362 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.083376 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gh95l\" (UniqueName: \"kubernetes.io/projected/425b549f-2862-4fc5-9d93-87d6c7973041-kube-api-access-gh95l\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.083391 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.083401 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/425b549f-2862-4fc5-9d93-87d6c7973041-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.099609 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "425b549f-2862-4fc5-9d93-87d6c7973041" (UID: "425b549f-2862-4fc5-9d93-87d6c7973041"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.143539 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-config-data" (OuterVolumeSpecName: "config-data") pod "425b549f-2862-4fc5-9d93-87d6c7973041" (UID: "425b549f-2862-4fc5-9d93-87d6c7973041"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.186098 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.186150 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/425b549f-2862-4fc5-9d93-87d6c7973041-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.461288 4906 generic.go:334] "Generic (PLEG): container finished" podID="425b549f-2862-4fc5-9d93-87d6c7973041" containerID="75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2" exitCode=0 Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.461335 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerDied","Data":"75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2"} Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.461377 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.461407 4906 scope.go:117] "RemoveContainer" containerID="736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.461391 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"425b549f-2862-4fc5-9d93-87d6c7973041","Type":"ContainerDied","Data":"783a63fee68387d627196c9a57640e7dafe746e837493190ed046dc82409aa3f"} Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.506382 4906 scope.go:117] "RemoveContainer" containerID="db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.523151 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.561409 4906 scope.go:117] "RemoveContainer" containerID="c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.615721 4906 scope.go:117] "RemoveContainer" containerID="75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622157 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622223 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.622547 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622565 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.622580 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="ceilometer-notification-agent" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622586 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" 
containerName="ceilometer-notification-agent" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.622608 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="ceilometer-central-agent" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622614 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="ceilometer-central-agent" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.622627 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon-log" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622632 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon-log" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.622653 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="sg-core" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622658 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="sg-core" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.622670 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="proxy-httpd" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622677 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="proxy-httpd" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622896 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="proxy-httpd" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622909 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622920 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="sg-core" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622927 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="ceilometer-notification-agent" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622937 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" containerName="ceilometer-central-agent" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.622952 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e6ab1c10-b552-4a69-94c7-68280ab7e126" containerName="horizon-log" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.624661 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.632147 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.636755 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.648979 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.702031 4906 scope.go:117] "RemoveContainer" containerID="736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.705615 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb\": container with ID starting with 736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb not found: ID does not exist" containerID="736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.705691 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb"} err="failed to get container status \"736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb\": rpc error: code = NotFound desc = could not find container \"736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb\": container with ID starting with 736c516ac2f69be7814281b929ee6fd1fa97e355a7edbb394c7c0a64ee2d0cfb not found: ID does not exist" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.705740 4906 scope.go:117] "RemoveContainer" containerID="db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.709240 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab\": container with ID starting with db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab not found: ID does not exist" containerID="db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.709285 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab"} err="failed to get container status \"db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab\": rpc error: code = NotFound desc = could not find container \"db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab\": container with ID starting with db5aec8a9de7fe92e856e17e842d2fabdae17b5adeb375cac63325fbbe7fa4ab not found: ID does not exist" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.709316 4906 scope.go:117] "RemoveContainer" containerID="c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.715095 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf\": container with ID starting with c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf not found: ID 
does not exist" containerID="c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.715151 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf"} err="failed to get container status \"c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf\": rpc error: code = NotFound desc = could not find container \"c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf\": container with ID starting with c9f64c7d5ec89b9a4fa35879e112ca03f0d80a0fba27b422a37055a5cfa2a6cf not found: ID does not exist" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.715240 4906 scope.go:117] "RemoveContainer" containerID="75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2" Feb 27 08:52:32 crc kubenswrapper[4906]: E0227 08:52:32.721059 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2\": container with ID starting with 75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2 not found: ID does not exist" containerID="75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.721118 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2"} err="failed to get container status \"75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2\": rpc error: code = NotFound desc = could not find container \"75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2\": container with ID starting with 75fe62e3f9b08ef88be5123afae14942773d4b48776e6077a6c6b97fd094e5b2 not found: ID does not exist" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.799538 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.799993 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.800179 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-run-httpd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.800348 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-log-httpd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.800518 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-scripts\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.800633 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-config-data\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.800737 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58gmd\" (UniqueName: \"kubernetes.io/projected/765e8be6-5069-4354-8552-d8771efae27c-kube-api-access-58gmd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.903900 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.904080 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-run-httpd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.904189 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-log-httpd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.904257 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-scripts\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.904297 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-config-data\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.904322 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58gmd\" (UniqueName: \"kubernetes.io/projected/765e8be6-5069-4354-8552-d8771efae27c-kube-api-access-58gmd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.904372 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 
08:52:32.905198 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-log-httpd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.905322 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-run-httpd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.910225 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-config-data\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.911317 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.915752 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.925020 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-scripts\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.934016 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58gmd\" (UniqueName: \"kubernetes.io/projected/765e8be6-5069-4354-8552-d8771efae27c-kube-api-access-58gmd\") pod \"ceilometer-0\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " pod="openstack/ceilometer-0" Feb 27 08:52:32 crc kubenswrapper[4906]: I0227 08:52:32.974803 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:34 crc kubenswrapper[4906]: I0227 08:52:34.195702 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:34 crc kubenswrapper[4906]: W0227 08:52:34.209610 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod765e8be6_5069_4354_8552_d8771efae27c.slice/crio-b6ab56fe4d947e952471bcc4d52768203a7c3ace4e62bcb4d19cde45b0b88f85 WatchSource:0}: Error finding container b6ab56fe4d947e952471bcc4d52768203a7c3ace4e62bcb4d19cde45b0b88f85: Status 404 returned error can't find the container with id b6ab56fe4d947e952471bcc4d52768203a7c3ace4e62bcb4d19cde45b0b88f85 Feb 27 08:52:34 crc kubenswrapper[4906]: I0227 08:52:34.484287 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerStarted","Data":"b6ab56fe4d947e952471bcc4d52768203a7c3ace4e62bcb4d19cde45b0b88f85"} Feb 27 08:52:34 crc kubenswrapper[4906]: I0227 08:52:34.582239 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="425b549f-2862-4fc5-9d93-87d6c7973041" path="/var/lib/kubelet/pods/425b549f-2862-4fc5-9d93-87d6c7973041/volumes" Feb 27 08:52:35 crc kubenswrapper[4906]: I0227 08:52:35.460685 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:35 crc kubenswrapper[4906]: I0227 08:52:35.498914 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerStarted","Data":"c93e04aac3f49823e50ad872c303aa87a369dd8d13f24bbf659b849a049e6621"} Feb 27 08:52:36 crc kubenswrapper[4906]: I0227 08:52:36.514536 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerStarted","Data":"94ffffe4681f59ad3db82f7bb529682fde43af4743b824c771a66455ec7dc73d"} Feb 27 08:52:37 crc kubenswrapper[4906]: I0227 08:52:37.552850 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerStarted","Data":"7a766cf0066b091d80d6a0a7c14085467d9787c263d38e1ac76b3b3ceb8e2019"} Feb 27 08:52:41 crc kubenswrapper[4906]: I0227 08:52:41.597583 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerStarted","Data":"48c99ba5361f49134c92955907415c63befe9dd15596f9bddc355e3a580e494a"} Feb 27 08:52:41 crc kubenswrapper[4906]: I0227 08:52:41.598468 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 27 08:52:41 crc kubenswrapper[4906]: I0227 08:52:41.597837 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="proxy-httpd" containerID="cri-o://48c99ba5361f49134c92955907415c63befe9dd15596f9bddc355e3a580e494a" gracePeriod=30 Feb 27 08:52:41 crc kubenswrapper[4906]: I0227 08:52:41.597834 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="sg-core" containerID="cri-o://7a766cf0066b091d80d6a0a7c14085467d9787c263d38e1ac76b3b3ceb8e2019" gracePeriod=30 Feb 27 08:52:41 crc kubenswrapper[4906]: I0227 08:52:41.597725 4906 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-central-agent" containerID="cri-o://c93e04aac3f49823e50ad872c303aa87a369dd8d13f24bbf659b849a049e6621" gracePeriod=30 Feb 27 08:52:41 crc kubenswrapper[4906]: I0227 08:52:41.597919 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-notification-agent" containerID="cri-o://94ffffe4681f59ad3db82f7bb529682fde43af4743b824c771a66455ec7dc73d" gracePeriod=30 Feb 27 08:52:41 crc kubenswrapper[4906]: I0227 08:52:41.626372 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.886672977 podStartE2EDuration="9.626347791s" podCreationTimestamp="2026-02-27 08:52:32 +0000 UTC" firstStartedPulling="2026-02-27 08:52:34.212291775 +0000 UTC m=+1452.606693385" lastFinishedPulling="2026-02-27 08:52:40.951966589 +0000 UTC m=+1459.346368199" observedRunningTime="2026-02-27 08:52:41.623761933 +0000 UTC m=+1460.018163553" watchObservedRunningTime="2026-02-27 08:52:41.626347791 +0000 UTC m=+1460.020749401" Feb 27 08:52:42 crc kubenswrapper[4906]: I0227 08:52:42.611841 4906 generic.go:334] "Generic (PLEG): container finished" podID="765e8be6-5069-4354-8552-d8771efae27c" containerID="48c99ba5361f49134c92955907415c63befe9dd15596f9bddc355e3a580e494a" exitCode=0 Feb 27 08:52:42 crc kubenswrapper[4906]: I0227 08:52:42.612173 4906 generic.go:334] "Generic (PLEG): container finished" podID="765e8be6-5069-4354-8552-d8771efae27c" containerID="7a766cf0066b091d80d6a0a7c14085467d9787c263d38e1ac76b3b3ceb8e2019" exitCode=2 Feb 27 08:52:42 crc kubenswrapper[4906]: I0227 08:52:42.612183 4906 generic.go:334] "Generic (PLEG): container finished" podID="765e8be6-5069-4354-8552-d8771efae27c" containerID="94ffffe4681f59ad3db82f7bb529682fde43af4743b824c771a66455ec7dc73d" exitCode=0 Feb 27 08:52:42 crc kubenswrapper[4906]: I0227 08:52:42.611914 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerDied","Data":"48c99ba5361f49134c92955907415c63befe9dd15596f9bddc355e3a580e494a"} Feb 27 08:52:42 crc kubenswrapper[4906]: I0227 08:52:42.612216 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerDied","Data":"7a766cf0066b091d80d6a0a7c14085467d9787c263d38e1ac76b3b3ceb8e2019"} Feb 27 08:52:42 crc kubenswrapper[4906]: I0227 08:52:42.612228 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerDied","Data":"94ffffe4681f59ad3db82f7bb529682fde43af4743b824c771a66455ec7dc73d"} Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.645832 4906 generic.go:334] "Generic (PLEG): container finished" podID="765e8be6-5069-4354-8552-d8771efae27c" containerID="c93e04aac3f49823e50ad872c303aa87a369dd8d13f24bbf659b849a049e6621" exitCode=0 Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.645905 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerDied","Data":"c93e04aac3f49823e50ad872c303aa87a369dd8d13f24bbf659b849a049e6621"} Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.646434 4906 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"765e8be6-5069-4354-8552-d8771efae27c","Type":"ContainerDied","Data":"b6ab56fe4d947e952471bcc4d52768203a7c3ace4e62bcb4d19cde45b0b88f85"} Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.646453 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b6ab56fe4d947e952471bcc4d52768203a7c3ace4e62bcb4d19cde45b0b88f85" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.656926 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.798923 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-log-httpd\") pod \"765e8be6-5069-4354-8552-d8771efae27c\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799049 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-scripts\") pod \"765e8be6-5069-4354-8552-d8771efae27c\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799184 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58gmd\" (UniqueName: \"kubernetes.io/projected/765e8be6-5069-4354-8552-d8771efae27c-kube-api-access-58gmd\") pod \"765e8be6-5069-4354-8552-d8771efae27c\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799210 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-sg-core-conf-yaml\") pod \"765e8be6-5069-4354-8552-d8771efae27c\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799258 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-combined-ca-bundle\") pod \"765e8be6-5069-4354-8552-d8771efae27c\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799282 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-run-httpd\") pod \"765e8be6-5069-4354-8552-d8771efae27c\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799334 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-config-data\") pod \"765e8be6-5069-4354-8552-d8771efae27c\" (UID: \"765e8be6-5069-4354-8552-d8771efae27c\") " Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799742 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "765e8be6-5069-4354-8552-d8771efae27c" (UID: "765e8be6-5069-4354-8552-d8771efae27c"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.799987 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "765e8be6-5069-4354-8552-d8771efae27c" (UID: "765e8be6-5069-4354-8552-d8771efae27c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.808948 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-scripts" (OuterVolumeSpecName: "scripts") pod "765e8be6-5069-4354-8552-d8771efae27c" (UID: "765e8be6-5069-4354-8552-d8771efae27c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.809004 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/765e8be6-5069-4354-8552-d8771efae27c-kube-api-access-58gmd" (OuterVolumeSpecName: "kube-api-access-58gmd") pod "765e8be6-5069-4354-8552-d8771efae27c" (UID: "765e8be6-5069-4354-8552-d8771efae27c"). InnerVolumeSpecName "kube-api-access-58gmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.836175 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "765e8be6-5069-4354-8552-d8771efae27c" (UID: "765e8be6-5069-4354-8552-d8771efae27c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.902337 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.902771 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58gmd\" (UniqueName: \"kubernetes.io/projected/765e8be6-5069-4354-8552-d8771efae27c-kube-api-access-58gmd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.902784 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.902793 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.902801 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/765e8be6-5069-4354-8552-d8771efae27c-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.912689 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "765e8be6-5069-4354-8552-d8771efae27c" (UID: "765e8be6-5069-4354-8552-d8771efae27c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:45 crc kubenswrapper[4906]: I0227 08:52:45.913104 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-config-data" (OuterVolumeSpecName: "config-data") pod "765e8be6-5069-4354-8552-d8771efae27c" (UID: "765e8be6-5069-4354-8552-d8771efae27c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.005291 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.005335 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/765e8be6-5069-4354-8552-d8771efae27c-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.659478 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.691104 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.701680 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.716081 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:46 crc kubenswrapper[4906]: E0227 08:52:46.716678 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="sg-core" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.716706 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="sg-core" Feb 27 08:52:46 crc kubenswrapper[4906]: E0227 08:52:46.716741 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-central-agent" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.716749 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-central-agent" Feb 27 08:52:46 crc kubenswrapper[4906]: E0227 08:52:46.716770 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="proxy-httpd" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.716776 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="proxy-httpd" Feb 27 08:52:46 crc kubenswrapper[4906]: E0227 08:52:46.716788 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-notification-agent" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.716794 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-notification-agent" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.716982 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="sg-core" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.716997 4906 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-notification-agent" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.717017 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="proxy-httpd" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.717035 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="765e8be6-5069-4354-8552-d8771efae27c" containerName="ceilometer-central-agent" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.722115 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.728659 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.729431 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.748288 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.824415 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-log-httpd\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.824533 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.824567 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.824631 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-config-data\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.824668 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7xgdv\" (UniqueName: \"kubernetes.io/projected/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-kube-api-access-7xgdv\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.824687 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-run-httpd\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.824710 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-scripts\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.943491 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.943652 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.943942 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-config-data\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.945245 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7xgdv\" (UniqueName: \"kubernetes.io/projected/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-kube-api-access-7xgdv\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.945314 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-run-httpd\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.945399 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-scripts\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.945472 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-log-httpd\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.948766 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-log-httpd\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.949290 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-run-httpd\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.955281 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-scripts\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.958511 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-config-data\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.971846 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.973144 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:46 crc kubenswrapper[4906]: I0227 08:52:46.987796 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7xgdv\" (UniqueName: \"kubernetes.io/projected/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-kube-api-access-7xgdv\") pod \"ceilometer-0\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " pod="openstack/ceilometer-0" Feb 27 08:52:47 crc kubenswrapper[4906]: I0227 08:52:47.053355 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:47 crc kubenswrapper[4906]: I0227 08:52:47.593154 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:47 crc kubenswrapper[4906]: I0227 08:52:47.680131 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerStarted","Data":"aafc0c5b7e47f3e189e1ab149a62275d8e1d143ba31458c2d8dc1e8479d14f77"} Feb 27 08:52:48 crc kubenswrapper[4906]: I0227 08:52:48.564343 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="765e8be6-5069-4354-8552-d8771efae27c" path="/var/lib/kubelet/pods/765e8be6-5069-4354-8552-d8771efae27c/volumes" Feb 27 08:52:49 crc kubenswrapper[4906]: I0227 08:52:49.152229 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:49 crc kubenswrapper[4906]: I0227 08:52:49.701216 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerStarted","Data":"8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13"} Feb 27 08:52:50 crc kubenswrapper[4906]: I0227 08:52:50.717784 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerStarted","Data":"5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b"} Feb 27 08:52:52 crc kubenswrapper[4906]: I0227 08:52:52.744535 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerStarted","Data":"100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf"} Feb 27 08:52:55 crc kubenswrapper[4906]: I0227 08:52:55.779628 
4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerStarted","Data":"3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304"} Feb 27 08:52:55 crc kubenswrapper[4906]: I0227 08:52:55.780223 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 27 08:52:55 crc kubenswrapper[4906]: I0227 08:52:55.780225 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="proxy-httpd" containerID="cri-o://3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304" gracePeriod=30 Feb 27 08:52:55 crc kubenswrapper[4906]: I0227 08:52:55.780246 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="sg-core" containerID="cri-o://100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf" gracePeriod=30 Feb 27 08:52:55 crc kubenswrapper[4906]: I0227 08:52:55.780300 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-notification-agent" containerID="cri-o://5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b" gracePeriod=30 Feb 27 08:52:55 crc kubenswrapper[4906]: I0227 08:52:55.779829 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-central-agent" containerID="cri-o://8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13" gracePeriod=30 Feb 27 08:52:55 crc kubenswrapper[4906]: I0227 08:52:55.802926 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.317433161 podStartE2EDuration="9.802902282s" podCreationTimestamp="2026-02-27 08:52:46 +0000 UTC" firstStartedPulling="2026-02-27 08:52:47.607173097 +0000 UTC m=+1466.001574707" lastFinishedPulling="2026-02-27 08:52:55.092642228 +0000 UTC m=+1473.487043828" observedRunningTime="2026-02-27 08:52:55.800253673 +0000 UTC m=+1474.194655313" watchObservedRunningTime="2026-02-27 08:52:55.802902282 +0000 UTC m=+1474.197303892" Feb 27 08:52:56 crc kubenswrapper[4906]: I0227 08:52:56.797484 4906 generic.go:334] "Generic (PLEG): container finished" podID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerID="3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304" exitCode=0 Feb 27 08:52:56 crc kubenswrapper[4906]: I0227 08:52:56.798193 4906 generic.go:334] "Generic (PLEG): container finished" podID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerID="100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf" exitCode=2 Feb 27 08:52:56 crc kubenswrapper[4906]: I0227 08:52:56.798210 4906 generic.go:334] "Generic (PLEG): container finished" podID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerID="5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b" exitCode=0 Feb 27 08:52:56 crc kubenswrapper[4906]: I0227 08:52:56.798243 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerDied","Data":"3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304"} Feb 27 08:52:56 crc kubenswrapper[4906]: I0227 08:52:56.798282 4906 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerDied","Data":"100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf"} Feb 27 08:52:56 crc kubenswrapper[4906]: I0227 08:52:56.798298 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerDied","Data":"5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b"} Feb 27 08:52:57 crc kubenswrapper[4906]: I0227 08:52:57.810996 4906 generic.go:334] "Generic (PLEG): container finished" podID="fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" containerID="2b2e3e6ca6758894e548e3731051e66d503701c972c66aae0ebb4abf976b6b7a" exitCode=0 Feb 27 08:52:57 crc kubenswrapper[4906]: I0227 08:52:57.811078 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-p544r" event={"ID":"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b","Type":"ContainerDied","Data":"2b2e3e6ca6758894e548e3731051e66d503701c972c66aae0ebb4abf976b6b7a"} Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.547032 4906 scope.go:117] "RemoveContainer" containerID="d74bf3d5cb0decd432277184b904ec73a48744ba9f974b92c711385bc91ced30" Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.830411 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.856654 4906 generic.go:334] "Generic (PLEG): container finished" podID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerID="8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13" exitCode=0 Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.856987 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerDied","Data":"8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13"} Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.857025 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"38047433-8d9f-4f90-a9fe-8a1960cc3d9f","Type":"ContainerDied","Data":"aafc0c5b7e47f3e189e1ab149a62275d8e1d143ba31458c2d8dc1e8479d14f77"} Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.857048 4906 scope.go:117] "RemoveContainer" containerID="3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304" Feb 27 08:52:58 crc kubenswrapper[4906]: E0227 08:52:58.875206 4906 kuberuntime_gc.go:389] "Failed to remove container log dead symlink" err="remove /var/log/containers/ceilometer-0_openstack_proxy-httpd-3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304.log: no such file or directory" path="/var/log/containers/ceilometer-0_openstack_proxy-httpd-3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304.log" Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.932774 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7xgdv\" (UniqueName: \"kubernetes.io/projected/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-kube-api-access-7xgdv\") pod \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.932944 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-scripts\") pod \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\" (UID: 
\"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.932974 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-combined-ca-bundle\") pod \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.933014 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-config-data\") pod \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.933039 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-run-httpd\") pod \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.933091 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-sg-core-conf-yaml\") pod \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.933129 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-log-httpd\") pod \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\" (UID: \"38047433-8d9f-4f90-a9fe-8a1960cc3d9f\") " Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.934455 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "38047433-8d9f-4f90-a9fe-8a1960cc3d9f" (UID: "38047433-8d9f-4f90-a9fe-8a1960cc3d9f"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.935789 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "38047433-8d9f-4f90-a9fe-8a1960cc3d9f" (UID: "38047433-8d9f-4f90-a9fe-8a1960cc3d9f"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.962279 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-kube-api-access-7xgdv" (OuterVolumeSpecName: "kube-api-access-7xgdv") pod "38047433-8d9f-4f90-a9fe-8a1960cc3d9f" (UID: "38047433-8d9f-4f90-a9fe-8a1960cc3d9f"). InnerVolumeSpecName "kube-api-access-7xgdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.978872 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-scripts" (OuterVolumeSpecName: "scripts") pod "38047433-8d9f-4f90-a9fe-8a1960cc3d9f" (UID: "38047433-8d9f-4f90-a9fe-8a1960cc3d9f"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:58 crc kubenswrapper[4906]: I0227 08:52:58.991769 4906 scope.go:117] "RemoveContainer" containerID="100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.010162 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "38047433-8d9f-4f90-a9fe-8a1960cc3d9f" (UID: "38047433-8d9f-4f90-a9fe-8a1960cc3d9f"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.035617 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7xgdv\" (UniqueName: \"kubernetes.io/projected/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-kube-api-access-7xgdv\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.035660 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.035672 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.035681 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.035689 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.045255 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "38047433-8d9f-4f90-a9fe-8a1960cc3d9f" (UID: "38047433-8d9f-4f90-a9fe-8a1960cc3d9f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.055931 4906 scope.go:117] "RemoveContainer" containerID="5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.091007 4906 scope.go:117] "RemoveContainer" containerID="8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.109182 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-config-data" (OuterVolumeSpecName: "config-data") pod "38047433-8d9f-4f90-a9fe-8a1960cc3d9f" (UID: "38047433-8d9f-4f90-a9fe-8a1960cc3d9f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.137471 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.137522 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/38047433-8d9f-4f90-a9fe-8a1960cc3d9f-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.139369 4906 scope.go:117] "RemoveContainer" containerID="3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.140504 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304\": container with ID starting with 3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304 not found: ID does not exist" containerID="3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.140548 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304"} err="failed to get container status \"3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304\": rpc error: code = NotFound desc = could not find container \"3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304\": container with ID starting with 3b20cfcc12c3e8f44bf7d564608f1416cc432a4b43a9fa459a3284fea00ef304 not found: ID does not exist" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.140579 4906 scope.go:117] "RemoveContainer" containerID="100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.141129 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf\": container with ID starting with 100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf not found: ID does not exist" containerID="100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.141164 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf"} err="failed to get container status \"100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf\": rpc error: code = NotFound desc = could not find container \"100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf\": container with ID starting with 100e4949081ccc93386dfb0fa91ecb115987a12fb03fb26290c6b916ee97fcaf not found: ID does not exist" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.141190 4906 scope.go:117] "RemoveContainer" containerID="5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.141558 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b\": container with ID starting with 
5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b not found: ID does not exist" containerID="5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.141583 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b"} err="failed to get container status \"5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b\": rpc error: code = NotFound desc = could not find container \"5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b\": container with ID starting with 5ac839e1a02522108fd6536ad697153cc0b81bf78d1d6422e4e01e016395997b not found: ID does not exist" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.141600 4906 scope.go:117] "RemoveContainer" containerID="8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.141990 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13\": container with ID starting with 8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13 not found: ID does not exist" containerID="8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.142014 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13"} err="failed to get container status \"8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13\": rpc error: code = NotFound desc = could not find container \"8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13\": container with ID starting with 8b8637fea46e9b258704b0f1109078ff6ecbf280897e1ece1c856f8a714efe13 not found: ID does not exist" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.198942 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.340366 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-config-data\") pod \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.340958 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-48tfw\" (UniqueName: \"kubernetes.io/projected/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-kube-api-access-48tfw\") pod \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.341160 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-scripts\") pod \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.341251 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-combined-ca-bundle\") pod \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\" (UID: \"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b\") " Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.345142 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-scripts" (OuterVolumeSpecName: "scripts") pod "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" (UID: "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.346158 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-kube-api-access-48tfw" (OuterVolumeSpecName: "kube-api-access-48tfw") pod "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" (UID: "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b"). InnerVolumeSpecName "kube-api-access-48tfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.370184 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" (UID: "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.372435 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-config-data" (OuterVolumeSpecName: "config-data") pod "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" (UID: "fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.444378 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.444446 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.444468 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.444486 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-48tfw\" (UniqueName: \"kubernetes.io/projected/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b-kube-api-access-48tfw\") on node \"crc\" DevicePath \"\"" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.874001 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.878714 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-p544r" event={"ID":"fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b","Type":"ContainerDied","Data":"d34bc66c9b33f1ea429a866c4537c63fbb6b718fdd52fabeb6c3b22cf002cee7"} Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.878789 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d34bc66c9b33f1ea429a866c4537c63fbb6b718fdd52fabeb6c3b22cf002cee7" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.878896 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-p544r" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.921599 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.936095 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945063 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.945496 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" containerName="nova-cell0-conductor-db-sync" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945513 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" containerName="nova-cell0-conductor-db-sync" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.945529 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="sg-core" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945537 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="sg-core" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.945550 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-notification-agent" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945561 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-notification-agent" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.945580 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="proxy-httpd" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945588 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="proxy-httpd" Feb 27 08:52:59 crc kubenswrapper[4906]: E0227 08:52:59.945615 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-central-agent" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945624 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-central-agent" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945805 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-central-agent" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945829 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" containerName="nova-cell0-conductor-db-sync" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945843 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="sg-core" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945855 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="ceilometer-notification-agent" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.945862 4906 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" containerName="proxy-httpd" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.947576 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.954269 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.954605 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:52:59 crc kubenswrapper[4906]: I0227 08:52:59.978325 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.012928 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.040635 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.049223 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.049922 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.051245 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-ljsrk" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.057256 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.057323 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-config-data\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.057409 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-run-httpd\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.057492 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgsw4\" (UniqueName: \"kubernetes.io/projected/e4e52025-428e-46db-950c-791f38cf2895-kube-api-access-pgsw4\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.057685 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-scripts\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.057838 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-log-httpd\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.057985 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.160794 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.160906 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-config-data\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161024 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161080 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-run-httpd\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161124 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgsw4\" (UniqueName: \"kubernetes.io/projected/e4e52025-428e-46db-950c-791f38cf2895-kube-api-access-pgsw4\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161232 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-scripts\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161281 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtshs\" (UniqueName: \"kubernetes.io/projected/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-kube-api-access-vtshs\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161348 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-log-httpd\") pod \"ceilometer-0\" (UID: 
\"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161419 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161500 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.161678 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-run-httpd\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.162069 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-log-httpd\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.167423 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-config-data\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.167922 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.168587 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.178526 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-scripts\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.180216 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgsw4\" (UniqueName: \"kubernetes.io/projected/e4e52025-428e-46db-950c-791f38cf2895-kube-api-access-pgsw4\") pod \"ceilometer-0\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.263767 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtshs\" (UniqueName: \"kubernetes.io/projected/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-kube-api-access-vtshs\") pod \"nova-cell0-conductor-0\" (UID: 
\"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.263969 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.264033 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.268037 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.268845 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.280203 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.281659 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtshs\" (UniqueName: \"kubernetes.io/projected/1fe8c767-11f8-4a04-aab7-940c1b55a7b5-kube-api-access-vtshs\") pod \"nova-cell0-conductor-0\" (UID: \"1fe8c767-11f8-4a04-aab7-940c1b55a7b5\") " pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.359788 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.634561 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="38047433-8d9f-4f90-a9fe-8a1960cc3d9f" path="/var/lib/kubelet/pods/38047433-8d9f-4f90-a9fe-8a1960cc3d9f/volumes" Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.759965 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.774414 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.895948 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerStarted","Data":"a2e4e8b23dd8cc370e767a253ce6020795527a25909dc80ff910cf5191f85699"} Feb 27 08:53:00 crc kubenswrapper[4906]: I0227 08:53:00.994952 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Feb 27 08:53:01 crc kubenswrapper[4906]: W0227 08:53:01.008504 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1fe8c767_11f8_4a04_aab7_940c1b55a7b5.slice/crio-210bcfcb9a325adb4d5892af3b2b7612904c1b8815d1d95b23d73f3f1db406b6 WatchSource:0}: Error finding container 210bcfcb9a325adb4d5892af3b2b7612904c1b8815d1d95b23d73f3f1db406b6: Status 404 returned error can't find the container with id 210bcfcb9a325adb4d5892af3b2b7612904c1b8815d1d95b23d73f3f1db406b6 Feb 27 08:53:01 crc kubenswrapper[4906]: I0227 08:53:01.914062 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerStarted","Data":"3307850c4df159782d3a7188196431a19b5bfe90f5166364ffa7f7ede4e2590b"} Feb 27 08:53:01 crc kubenswrapper[4906]: I0227 08:53:01.916611 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1fe8c767-11f8-4a04-aab7-940c1b55a7b5","Type":"ContainerStarted","Data":"cf7f12bf662204a82855ccdb3875f41c716b681ea69a830ed87e4f86d182dadc"} Feb 27 08:53:01 crc kubenswrapper[4906]: I0227 08:53:01.916685 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"1fe8c767-11f8-4a04-aab7-940c1b55a7b5","Type":"ContainerStarted","Data":"210bcfcb9a325adb4d5892af3b2b7612904c1b8815d1d95b23d73f3f1db406b6"} Feb 27 08:53:01 crc kubenswrapper[4906]: I0227 08:53:01.918553 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:01 crc kubenswrapper[4906]: I0227 08:53:01.950154 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.950122627 podStartE2EDuration="2.950122627s" podCreationTimestamp="2026-02-27 08:52:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:01.944220982 +0000 UTC m=+1480.338622612" watchObservedRunningTime="2026-02-27 08:53:01.950122627 +0000 UTC m=+1480.344524237" Feb 27 08:53:02 crc kubenswrapper[4906]: I0227 08:53:02.931017 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerStarted","Data":"f71c93a50b50e04b0ad4f47ed7393383ea2dd4f80cffad8c957b77fa5312fbb1"} Feb 27 08:53:02 crc kubenswrapper[4906]: I0227 08:53:02.931448 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerStarted","Data":"515fac5d741589707eca3835cea9e9ca69f06d6e8c7f76d4269ffc25dff37984"} Feb 27 08:53:04 crc kubenswrapper[4906]: I0227 08:53:04.968051 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerStarted","Data":"3f76341795542d0ee376b586f5bf8ca9f81794a44a9c3b1d4460fb9a09cea1d4"} Feb 27 08:53:04 crc kubenswrapper[4906]: I0227 08:53:04.968952 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 27 08:53:05 crc kubenswrapper[4906]: I0227 08:53:05.000863 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.37186019 podStartE2EDuration="6.000835908s" podCreationTimestamp="2026-02-27 08:52:59 +0000 UTC" firstStartedPulling="2026-02-27 08:53:00.774191023 +0000 UTC m=+1479.168592633" lastFinishedPulling="2026-02-27 08:53:04.403166741 +0000 UTC m=+1482.797568351" observedRunningTime="2026-02-27 08:53:04.990479186 +0000 UTC m=+1483.384880796" watchObservedRunningTime="2026-02-27 08:53:05.000835908 +0000 UTC m=+1483.395237518" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.391098 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.913256 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-ld4zf"] Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.915066 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.918441 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.918981 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.932316 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ld4zf"] Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.998294 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn5b4\" (UniqueName: \"kubernetes.io/projected/293f689b-eec4-4963-9036-b5fc98dcbcaa-kube-api-access-jn5b4\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.998377 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.998455 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-config-data\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:10 crc kubenswrapper[4906]: I0227 08:53:10.998495 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-scripts\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.100590 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-config-data\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.100661 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-scripts\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.100729 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn5b4\" (UniqueName: \"kubernetes.io/projected/293f689b-eec4-4963-9036-b5fc98dcbcaa-kube-api-access-jn5b4\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.100773 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.112069 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.112140 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-config-data\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.130445 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn5b4\" (UniqueName: \"kubernetes.io/projected/293f689b-eec4-4963-9036-b5fc98dcbcaa-kube-api-access-jn5b4\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.133755 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.136627 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.137101 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-scripts\") pod \"nova-cell0-cell-mapping-ld4zf\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.139078 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.159542 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.205946 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-config-data\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.206303 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.206770 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc624da-7d53-4588-9963-0db01ab501cf-logs\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.208793 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp6dn\" (UniqueName: \"kubernetes.io/projected/3dc624da-7d53-4588-9963-0db01ab501cf-kube-api-access-gp6dn\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.249431 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.301649 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.304090 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.323267 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.323407 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc624da-7d53-4588-9963-0db01ab501cf-logs\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.323457 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp6dn\" (UniqueName: \"kubernetes.io/projected/3dc624da-7d53-4588-9963-0db01ab501cf-kube-api-access-gp6dn\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.323543 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-config-data\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.325681 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc624da-7d53-4588-9963-0db01ab501cf-logs\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.338739 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.342455 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.380665 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-config-data\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.381177 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:11 crc 
kubenswrapper[4906]: I0227 08:53:11.383736 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp6dn\" (UniqueName: \"kubernetes.io/projected/3dc624da-7d53-4588-9963-0db01ab501cf-kube-api-access-gp6dn\") pod \"nova-api-0\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.425195 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-config-data\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.425253 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sgr7\" (UniqueName: \"kubernetes.io/projected/b812e4ee-589d-42ec-b2b4-c57bb24520de-kube-api-access-9sgr7\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.425308 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.430733 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.447183 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.450596 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.463516 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.529649 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-config-data\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.530141 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sgr7\" (UniqueName: \"kubernetes.io/projected/b812e4ee-589d-42ec-b2b4-c57bb24520de-kube-api-access-9sgr7\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.530284 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz74c\" (UniqueName: \"kubernetes.io/projected/02339013-9f1f-433a-8501-c1c62078eac2-kube-api-access-fz74c\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.530455 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-config-data\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.531850 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.532084 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.532181 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02339013-9f1f-433a-8501-c1c62078eac2-logs\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.537588 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-config-data\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.545738 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.576044 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:11 
crc kubenswrapper[4906]: I0227 08:53:11.590236 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sgr7\" (UniqueName: \"kubernetes.io/projected/b812e4ee-589d-42ec-b2b4-c57bb24520de-kube-api-access-9sgr7\") pod \"nova-scheduler-0\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.602356 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.604552 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.618555 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.619367 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.634336 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz74c\" (UniqueName: \"kubernetes.io/projected/02339013-9f1f-433a-8501-c1c62078eac2-kube-api-access-fz74c\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.634406 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-config-data\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.634452 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.634516 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02339013-9f1f-433a-8501-c1c62078eac2-logs\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.634569 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j958f\" (UniqueName: \"kubernetes.io/projected/f12ff417-46e1-4950-9e28-9a41afadd152-kube-api-access-j958f\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.634602 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.634903 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-combined-ca-bundle\") pod 
\"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.639590 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02339013-9f1f-433a-8501-c1c62078eac2-logs\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.646994 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-config-data\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.655958 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.676850 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-699d7ddff-r4mqm"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.680441 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz74c\" (UniqueName: \"kubernetes.io/projected/02339013-9f1f-433a-8501-c1c62078eac2-kube-api-access-fz74c\") pod \"nova-metadata-0\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.686208 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.695661 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699d7ddff-r4mqm"] Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.738874 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-config\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.738966 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j958f\" (UniqueName: \"kubernetes.io/projected/f12ff417-46e1-4950-9e28-9a41afadd152-kube-api-access-j958f\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.738998 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.739136 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-svc\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.739163 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-sb\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.740409 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn86p\" (UniqueName: \"kubernetes.io/projected/b09dc78d-c1b1-4370-938c-49fc82324733-kube-api-access-jn86p\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.740630 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.740681 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-nb\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.740704 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-swift-storage-0\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.750433 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.752804 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.763450 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j958f\" (UniqueName: \"kubernetes.io/projected/f12ff417-46e1-4950-9e28-9a41afadd152-kube-api-access-j958f\") pod \"nova-cell1-novncproxy-0\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.803872 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.841531 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.847670 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-svc\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.849104 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-svc\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.850285 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-sb\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.850464 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn86p\" (UniqueName: \"kubernetes.io/projected/b09dc78d-c1b1-4370-938c-49fc82324733-kube-api-access-jn86p\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.851313 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-nb\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: 
\"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.851340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-swift-storage-0\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.852671 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-sb\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.852766 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-config\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.853129 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-nb\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.853620 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-swift-storage-0\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.854224 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-config\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.878232 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn86p\" (UniqueName: \"kubernetes.io/projected/b09dc78d-c1b1-4370-938c-49fc82324733-kube-api-access-jn86p\") pod \"dnsmasq-dns-699d7ddff-r4mqm\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:11 crc kubenswrapper[4906]: I0227 08:53:11.980375 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.023039 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-ld4zf"] Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.028661 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.069671 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ld4zf" event={"ID":"293f689b-eec4-4963-9036-b5fc98dcbcaa","Type":"ContainerStarted","Data":"1776dd6488a1dbee31350c2a5113ed12fb585becfa5cb6d9c792547ef35ad115"} Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.192347 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.348225 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-n9r82"] Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.349861 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.352828 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.352901 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.379520 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-n9r82"] Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.496991 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-config-data\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.497135 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdg6w\" (UniqueName: \"kubernetes.io/projected/bcd22869-73ce-4c75-8628-2bf971da33d5-kube-api-access-gdg6w\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.497212 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-scripts\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.497256 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.510926 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.600203 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdg6w\" (UniqueName: \"kubernetes.io/projected/bcd22869-73ce-4c75-8628-2bf971da33d5-kube-api-access-gdg6w\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: 
\"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.600283 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-scripts\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.600324 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.600365 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-config-data\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.643339 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.665021 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-scripts\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.669780 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdg6w\" (UniqueName: \"kubernetes.io/projected/bcd22869-73ce-4c75-8628-2bf971da33d5-kube-api-access-gdg6w\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.673727 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-config-data\") pod \"nova-cell1-conductor-db-sync-n9r82\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.715566 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.802102 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:12 crc kubenswrapper[4906]: I0227 08:53:12.993236 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:13 crc kubenswrapper[4906]: W0227 08:53:13.010612 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf12ff417_46e1_4950_9e28_9a41afadd152.slice/crio-d5b493d88fb6d0ce2397e784109c432ebaa69e095d4485cdeccae412b2830225 WatchSource:0}: Error finding container d5b493d88fb6d0ce2397e784109c432ebaa69e095d4485cdeccae412b2830225: Status 404 returned error can't find the container with id d5b493d88fb6d0ce2397e784109c432ebaa69e095d4485cdeccae412b2830225 Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.092450 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02339013-9f1f-433a-8501-c1c62078eac2","Type":"ContainerStarted","Data":"2f8af969282803e44c575beb40c5cd2cd600d922373905728f5c946e44691e4b"} Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.097268 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ld4zf" event={"ID":"293f689b-eec4-4963-9036-b5fc98dcbcaa","Type":"ContainerStarted","Data":"4f161e1ad93059ba80d68c27e36baaf34205e7be85744e29f0c874ca877875a9"} Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.125720 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f12ff417-46e1-4950-9e28-9a41afadd152","Type":"ContainerStarted","Data":"d5b493d88fb6d0ce2397e784109c432ebaa69e095d4485cdeccae412b2830225"} Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.128834 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3dc624da-7d53-4588-9963-0db01ab501cf","Type":"ContainerStarted","Data":"22ac00e28b795db1cae47fb060a3bf77aeaaa8237218023d584272640e7d93f1"} Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.137335 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-ld4zf" podStartSLOduration=3.137312367 podStartE2EDuration="3.137312367s" podCreationTimestamp="2026-02-27 08:53:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:13.128698921 +0000 UTC m=+1491.523100531" watchObservedRunningTime="2026-02-27 08:53:13.137312367 +0000 UTC m=+1491.531713977" Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.150234 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b812e4ee-589d-42ec-b2b4-c57bb24520de","Type":"ContainerStarted","Data":"f37901d83ec59d93cdcf25be953283ddd9155066f0fbdacac30f5dce55904d60"} Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.171077 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-699d7ddff-r4mqm"] Feb 27 08:53:13 crc kubenswrapper[4906]: I0227 08:53:13.356184 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-n9r82"] Feb 27 08:53:13 crc kubenswrapper[4906]: W0227 08:53:13.358979 4906 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcd22869_73ce_4c75_8628_2bf971da33d5.slice/crio-dac167bb6d2d4d90d91364a9501f09e9691b4a3dfccc5b497d87d134dd65eb64 WatchSource:0}: Error finding container dac167bb6d2d4d90d91364a9501f09e9691b4a3dfccc5b497d87d134dd65eb64: Status 404 returned error can't find the container with id dac167bb6d2d4d90d91364a9501f09e9691b4a3dfccc5b497d87d134dd65eb64 Feb 27 08:53:14 crc kubenswrapper[4906]: I0227 08:53:14.165785 4906 generic.go:334] "Generic (PLEG): container finished" podID="b09dc78d-c1b1-4370-938c-49fc82324733" containerID="7b9dbc7d7615ae7aeffedbaa8a65cbdc327ea82d656cb71aae1751864d1a9bd2" exitCode=0 Feb 27 08:53:14 crc kubenswrapper[4906]: I0227 08:53:14.167203 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" event={"ID":"b09dc78d-c1b1-4370-938c-49fc82324733","Type":"ContainerDied","Data":"7b9dbc7d7615ae7aeffedbaa8a65cbdc327ea82d656cb71aae1751864d1a9bd2"} Feb 27 08:53:14 crc kubenswrapper[4906]: I0227 08:53:14.167241 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" event={"ID":"b09dc78d-c1b1-4370-938c-49fc82324733","Type":"ContainerStarted","Data":"16c281be24153d24a454a4a6cb15d8009d12dece28cc224b78b74653d9d4d8d0"} Feb 27 08:53:14 crc kubenswrapper[4906]: I0227 08:53:14.187760 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-n9r82" event={"ID":"bcd22869-73ce-4c75-8628-2bf971da33d5","Type":"ContainerStarted","Data":"155a4e047dac2c401b8f299eb631cf5e030fa9aa5a6ce35a4e8ed5a7b5c28f94"} Feb 27 08:53:14 crc kubenswrapper[4906]: I0227 08:53:14.187858 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-n9r82" event={"ID":"bcd22869-73ce-4c75-8628-2bf971da33d5","Type":"ContainerStarted","Data":"dac167bb6d2d4d90d91364a9501f09e9691b4a3dfccc5b497d87d134dd65eb64"} Feb 27 08:53:15 crc kubenswrapper[4906]: I0227 08:53:15.026904 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-n9r82" podStartSLOduration=3.026859362 podStartE2EDuration="3.026859362s" podCreationTimestamp="2026-02-27 08:53:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:14.230292152 +0000 UTC m=+1492.624693762" watchObservedRunningTime="2026-02-27 08:53:15.026859362 +0000 UTC m=+1493.421260972" Feb 27 08:53:15 crc kubenswrapper[4906]: I0227 08:53:15.028800 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:15 crc kubenswrapper[4906]: I0227 08:53:15.070651 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.237456 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b812e4ee-589d-42ec-b2b4-c57bb24520de","Type":"ContainerStarted","Data":"917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2"} Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.244552 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02339013-9f1f-433a-8501-c1c62078eac2","Type":"ContainerStarted","Data":"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5"} Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.247367 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f12ff417-46e1-4950-9e28-9a41afadd152","Type":"ContainerStarted","Data":"056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055"} Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.247379 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="f12ff417-46e1-4950-9e28-9a41afadd152" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055" gracePeriod=30 Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.251100 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3dc624da-7d53-4588-9963-0db01ab501cf","Type":"ContainerStarted","Data":"9b35daa687f175c3ecbdd4fc565e08e63d823d2088c43e55069f5fc4657b63d8"} Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.254469 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" event={"ID":"b09dc78d-c1b1-4370-938c-49fc82324733","Type":"ContainerStarted","Data":"f1d3a957a25ff1140f21d57e8608430db4a9d06f35e3a7f83092b423b204a19d"} Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.255178 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.271318 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.506675117 podStartE2EDuration="6.271293238s" podCreationTimestamp="2026-02-27 08:53:11 +0000 UTC" firstStartedPulling="2026-02-27 08:53:12.810053252 +0000 UTC m=+1491.204454862" lastFinishedPulling="2026-02-27 08:53:16.574671363 +0000 UTC m=+1494.969072983" observedRunningTime="2026-02-27 08:53:17.261791749 +0000 UTC m=+1495.656193359" watchObservedRunningTime="2026-02-27 08:53:17.271293238 +0000 UTC m=+1495.665694848" Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.289345 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" podStartSLOduration=6.289324802 podStartE2EDuration="6.289324802s" podCreationTimestamp="2026-02-27 08:53:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:17.287464443 +0000 UTC m=+1495.681866043" watchObservedRunningTime="2026-02-27 08:53:17.289324802 +0000 UTC m=+1495.683726412" Feb 27 08:53:17 crc kubenswrapper[4906]: I0227 08:53:17.314454 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.754095146 podStartE2EDuration="6.314422851s" podCreationTimestamp="2026-02-27 08:53:11 +0000 UTC" firstStartedPulling="2026-02-27 08:53:13.01366337 +0000 UTC m=+1491.408064970" lastFinishedPulling="2026-02-27 08:53:16.573991065 +0000 UTC m=+1494.968392675" observedRunningTime="2026-02-27 08:53:17.305835445 +0000 UTC m=+1495.700237065" watchObservedRunningTime="2026-02-27 08:53:17.314422851 +0000 UTC m=+1495.708824461" Feb 27 08:53:18 crc kubenswrapper[4906]: I0227 08:53:18.267197 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3dc624da-7d53-4588-9963-0db01ab501cf","Type":"ContainerStarted","Data":"b39c9bfcb5b446991e94a358e7100f9f8bdc72d5b27edcef064189c1c479f1d5"} Feb 27 08:53:18 crc kubenswrapper[4906]: I0227 08:53:18.273302 4906 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-log" containerID="cri-o://74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5" gracePeriod=30 Feb 27 08:53:18 crc kubenswrapper[4906]: I0227 08:53:18.273657 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02339013-9f1f-433a-8501-c1c62078eac2","Type":"ContainerStarted","Data":"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c"} Feb 27 08:53:18 crc kubenswrapper[4906]: I0227 08:53:18.274354 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-metadata" containerID="cri-o://780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c" gracePeriod=30 Feb 27 08:53:18 crc kubenswrapper[4906]: I0227 08:53:18.301008 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.943382526 podStartE2EDuration="7.30097964s" podCreationTimestamp="2026-02-27 08:53:11 +0000 UTC" firstStartedPulling="2026-02-27 08:53:12.211508732 +0000 UTC m=+1490.605910342" lastFinishedPulling="2026-02-27 08:53:16.569105846 +0000 UTC m=+1494.963507456" observedRunningTime="2026-02-27 08:53:18.290758812 +0000 UTC m=+1496.685160422" watchObservedRunningTime="2026-02-27 08:53:18.30097964 +0000 UTC m=+1496.695381250" Feb 27 08:53:18 crc kubenswrapper[4906]: I0227 08:53:18.325094 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.290578925 podStartE2EDuration="7.325068403s" podCreationTimestamp="2026-02-27 08:53:11 +0000 UTC" firstStartedPulling="2026-02-27 08:53:12.539167798 +0000 UTC m=+1490.933569408" lastFinishedPulling="2026-02-27 08:53:16.573657276 +0000 UTC m=+1494.968058886" observedRunningTime="2026-02-27 08:53:18.317144825 +0000 UTC m=+1496.711546445" watchObservedRunningTime="2026-02-27 08:53:18.325068403 +0000 UTC m=+1496.719470003" Feb 27 08:53:18 crc kubenswrapper[4906]: I0227 08:53:18.965363 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.094220 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-config-data\") pod \"02339013-9f1f-433a-8501-c1c62078eac2\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.094369 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fz74c\" (UniqueName: \"kubernetes.io/projected/02339013-9f1f-433a-8501-c1c62078eac2-kube-api-access-fz74c\") pod \"02339013-9f1f-433a-8501-c1c62078eac2\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.095302 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02339013-9f1f-433a-8501-c1c62078eac2-logs\") pod \"02339013-9f1f-433a-8501-c1c62078eac2\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.095361 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-combined-ca-bundle\") pod \"02339013-9f1f-433a-8501-c1c62078eac2\" (UID: \"02339013-9f1f-433a-8501-c1c62078eac2\") " Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.096283 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/02339013-9f1f-433a-8501-c1c62078eac2-logs" (OuterVolumeSpecName: "logs") pod "02339013-9f1f-433a-8501-c1c62078eac2" (UID: "02339013-9f1f-433a-8501-c1c62078eac2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.102122 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02339013-9f1f-433a-8501-c1c62078eac2-kube-api-access-fz74c" (OuterVolumeSpecName: "kube-api-access-fz74c") pod "02339013-9f1f-433a-8501-c1c62078eac2" (UID: "02339013-9f1f-433a-8501-c1c62078eac2"). InnerVolumeSpecName "kube-api-access-fz74c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.125114 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-config-data" (OuterVolumeSpecName: "config-data") pod "02339013-9f1f-433a-8501-c1c62078eac2" (UID: "02339013-9f1f-433a-8501-c1c62078eac2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.142131 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02339013-9f1f-433a-8501-c1c62078eac2" (UID: "02339013-9f1f-433a-8501-c1c62078eac2"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.198120 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/02339013-9f1f-433a-8501-c1c62078eac2-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.198160 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.198171 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02339013-9f1f-433a-8501-c1c62078eac2-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.198180 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fz74c\" (UniqueName: \"kubernetes.io/projected/02339013-9f1f-433a-8501-c1c62078eac2-kube-api-access-fz74c\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.289484 4906 generic.go:334] "Generic (PLEG): container finished" podID="02339013-9f1f-433a-8501-c1c62078eac2" containerID="780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c" exitCode=0 Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.289533 4906 generic.go:334] "Generic (PLEG): container finished" podID="02339013-9f1f-433a-8501-c1c62078eac2" containerID="74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5" exitCode=143 Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.289631 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.289702 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02339013-9f1f-433a-8501-c1c62078eac2","Type":"ContainerDied","Data":"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c"} Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.289761 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02339013-9f1f-433a-8501-c1c62078eac2","Type":"ContainerDied","Data":"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5"} Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.289779 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"02339013-9f1f-433a-8501-c1c62078eac2","Type":"ContainerDied","Data":"2f8af969282803e44c575beb40c5cd2cd600d922373905728f5c946e44691e4b"} Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.289803 4906 scope.go:117] "RemoveContainer" containerID="780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.325208 4906 scope.go:117] "RemoveContainer" containerID="74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.344273 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.356705 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.358224 4906 scope.go:117] "RemoveContainer" containerID="780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c" Feb 27 
08:53:19 crc kubenswrapper[4906]: E0227 08:53:19.361287 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c\": container with ID starting with 780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c not found: ID does not exist" containerID="780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.361330 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c"} err="failed to get container status \"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c\": rpc error: code = NotFound desc = could not find container \"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c\": container with ID starting with 780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c not found: ID does not exist" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.361364 4906 scope.go:117] "RemoveContainer" containerID="74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5" Feb 27 08:53:19 crc kubenswrapper[4906]: E0227 08:53:19.361991 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5\": container with ID starting with 74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5 not found: ID does not exist" containerID="74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.362022 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5"} err="failed to get container status \"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5\": rpc error: code = NotFound desc = could not find container \"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5\": container with ID starting with 74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5 not found: ID does not exist" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.362044 4906 scope.go:117] "RemoveContainer" containerID="780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.364156 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c"} err="failed to get container status \"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c\": rpc error: code = NotFound desc = could not find container \"780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c\": container with ID starting with 780ba02e7863cd72a3a5d21147de079d1664a65e3d9a4c7ec730869c4b65260c not found: ID does not exist" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.364186 4906 scope.go:117] "RemoveContainer" containerID="74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.364536 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5"} err="failed to get container status 
\"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5\": rpc error: code = NotFound desc = could not find container \"74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5\": container with ID starting with 74b90593b874af99584a0616bfa864670fd4c3f37f9af5ea4ddd7ec57676a7a5 not found: ID does not exist" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.369228 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:19 crc kubenswrapper[4906]: E0227 08:53:19.369725 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-metadata" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.369746 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-metadata" Feb 27 08:53:19 crc kubenswrapper[4906]: E0227 08:53:19.369755 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-log" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.369763 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-log" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.374316 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-log" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.374374 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="02339013-9f1f-433a-8501-c1c62078eac2" containerName="nova-metadata-metadata" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.376004 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.380839 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.381168 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.395116 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.504010 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4996a48c-a138-408e-b5cd-531962d725f8-logs\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.504065 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jsrjk\" (UniqueName: \"kubernetes.io/projected/4996a48c-a138-408e-b5cd-531962d725f8-kube-api-access-jsrjk\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.504232 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-config-data\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.504317 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.504344 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.606538 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.607005 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.607126 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4996a48c-a138-408e-b5cd-531962d725f8-logs\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " 
pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.607147 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jsrjk\" (UniqueName: \"kubernetes.io/projected/4996a48c-a138-408e-b5cd-531962d725f8-kube-api-access-jsrjk\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.607200 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-config-data\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.607715 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4996a48c-a138-408e-b5cd-531962d725f8-logs\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.613920 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.620837 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-config-data\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.625783 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.652656 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jsrjk\" (UniqueName: \"kubernetes.io/projected/4996a48c-a138-408e-b5cd-531962d725f8-kube-api-access-jsrjk\") pod \"nova-metadata-0\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " pod="openstack/nova-metadata-0" Feb 27 08:53:19 crc kubenswrapper[4906]: I0227 08:53:19.709074 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:20.208034 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:20.339123 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4996a48c-a138-408e-b5cd-531962d725f8","Type":"ContainerStarted","Data":"203e73c785b5d9a3d065de9b4f3901776870485565ade95f8ee97d44f3d0b58b"} Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:20.566368 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02339013-9f1f-433a-8501-c1c62078eac2" path="/var/lib/kubelet/pods/02339013-9f1f-433a-8501-c1c62078eac2/volumes" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.350145 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4996a48c-a138-408e-b5cd-531962d725f8","Type":"ContainerStarted","Data":"42d480c2e89901b1481891529e4d6d486d4d5c92de83582e336c211b11ca5e2b"} Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.350628 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4996a48c-a138-408e-b5cd-531962d725f8","Type":"ContainerStarted","Data":"923b96ea5ade69f4f276a23a0187ed4bf7bd5c52403b0345dad96e34d746d5c7"} Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.383648 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.38362279 podStartE2EDuration="2.38362279s" podCreationTimestamp="2026-02-27 08:53:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:21.379745508 +0000 UTC m=+1499.774147138" watchObservedRunningTime="2026-02-27 08:53:21.38362279 +0000 UTC m=+1499.778024400" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.464859 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.466567 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.805705 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.805775 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.843069 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 27 08:53:21 crc kubenswrapper[4906]: I0227 08:53:21.982716 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.032187 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.152836 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65bc8f75b9-q99p2"] Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.153190 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" containerName="dnsmasq-dns" 
containerID="cri-o://9bd7487fb2a194f3772768b849738b9c4a53b98d5e42f771a716d3d6241a494a" gracePeriod=10 Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.374518 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.171:5353: connect: connection refused" Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.382253 4906 generic.go:334] "Generic (PLEG): container finished" podID="293f689b-eec4-4963-9036-b5fc98dcbcaa" containerID="4f161e1ad93059ba80d68c27e36baaf34205e7be85744e29f0c874ca877875a9" exitCode=0 Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.382339 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ld4zf" event={"ID":"293f689b-eec4-4963-9036-b5fc98dcbcaa","Type":"ContainerDied","Data":"4f161e1ad93059ba80d68c27e36baaf34205e7be85744e29f0c874ca877875a9"} Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.394808 4906 generic.go:334] "Generic (PLEG): container finished" podID="34296caa-6147-4848-a0c6-2b5be70028d1" containerID="9bd7487fb2a194f3772768b849738b9c4a53b98d5e42f771a716d3d6241a494a" exitCode=0 Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.396144 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" event={"ID":"34296caa-6147-4848-a0c6-2b5be70028d1","Type":"ContainerDied","Data":"9bd7487fb2a194f3772768b849738b9c4a53b98d5e42f771a716d3d6241a494a"} Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.443041 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.549079 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.549198 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.192:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 27 08:53:22 crc kubenswrapper[4906]: I0227 08:53:22.926070 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.007454 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-nb\") pod \"34296caa-6147-4848-a0c6-2b5be70028d1\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.008003 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-config\") pod \"34296caa-6147-4848-a0c6-2b5be70028d1\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.008066 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-svc\") pod \"34296caa-6147-4848-a0c6-2b5be70028d1\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.008273 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-swift-storage-0\") pod \"34296caa-6147-4848-a0c6-2b5be70028d1\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.008313 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwnhd\" (UniqueName: \"kubernetes.io/projected/34296caa-6147-4848-a0c6-2b5be70028d1-kube-api-access-fwnhd\") pod \"34296caa-6147-4848-a0c6-2b5be70028d1\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.008456 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-sb\") pod \"34296caa-6147-4848-a0c6-2b5be70028d1\" (UID: \"34296caa-6147-4848-a0c6-2b5be70028d1\") " Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.022725 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34296caa-6147-4848-a0c6-2b5be70028d1-kube-api-access-fwnhd" (OuterVolumeSpecName: "kube-api-access-fwnhd") pod "34296caa-6147-4848-a0c6-2b5be70028d1" (UID: "34296caa-6147-4848-a0c6-2b5be70028d1"). InnerVolumeSpecName "kube-api-access-fwnhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.099460 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "34296caa-6147-4848-a0c6-2b5be70028d1" (UID: "34296caa-6147-4848-a0c6-2b5be70028d1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.101750 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "34296caa-6147-4848-a0c6-2b5be70028d1" (UID: "34296caa-6147-4848-a0c6-2b5be70028d1"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.104907 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "34296caa-6147-4848-a0c6-2b5be70028d1" (UID: "34296caa-6147-4848-a0c6-2b5be70028d1"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.112018 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-config" (OuterVolumeSpecName: "config") pod "34296caa-6147-4848-a0c6-2b5be70028d1" (UID: "34296caa-6147-4848-a0c6-2b5be70028d1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.116232 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.116293 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwnhd\" (UniqueName: \"kubernetes.io/projected/34296caa-6147-4848-a0c6-2b5be70028d1-kube-api-access-fwnhd\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.116312 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.116324 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.116336 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.118482 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "34296caa-6147-4848-a0c6-2b5be70028d1" (UID: "34296caa-6147-4848-a0c6-2b5be70028d1"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.218099 4906 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/34296caa-6147-4848-a0c6-2b5be70028d1-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.408631 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" event={"ID":"34296caa-6147-4848-a0c6-2b5be70028d1","Type":"ContainerDied","Data":"797666ea950012bc2a0556a55ebf7e99c7864e5ed8536711370e68b1c3afd33b"} Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.408713 4906 scope.go:117] "RemoveContainer" containerID="9bd7487fb2a194f3772768b849738b9c4a53b98d5e42f771a716d3d6241a494a" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.408715 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-65bc8f75b9-q99p2" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.440956 4906 scope.go:117] "RemoveContainer" containerID="a7211580abb6aae0dd574ff634bcff55f0a983f9b680266c274c60007b7d18f5" Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.447734 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-65bc8f75b9-q99p2"] Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.458426 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-65bc8f75b9-q99p2"] Feb 27 08:53:23 crc kubenswrapper[4906]: I0227 08:53:23.864466 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.036055 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-combined-ca-bundle\") pod \"293f689b-eec4-4963-9036-b5fc98dcbcaa\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.036238 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn5b4\" (UniqueName: \"kubernetes.io/projected/293f689b-eec4-4963-9036-b5fc98dcbcaa-kube-api-access-jn5b4\") pod \"293f689b-eec4-4963-9036-b5fc98dcbcaa\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.036267 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-scripts\") pod \"293f689b-eec4-4963-9036-b5fc98dcbcaa\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.036339 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-config-data\") pod \"293f689b-eec4-4963-9036-b5fc98dcbcaa\" (UID: \"293f689b-eec4-4963-9036-b5fc98dcbcaa\") " Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.045942 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/293f689b-eec4-4963-9036-b5fc98dcbcaa-kube-api-access-jn5b4" (OuterVolumeSpecName: "kube-api-access-jn5b4") pod "293f689b-eec4-4963-9036-b5fc98dcbcaa" (UID: "293f689b-eec4-4963-9036-b5fc98dcbcaa"). InnerVolumeSpecName "kube-api-access-jn5b4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.049470 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-scripts" (OuterVolumeSpecName: "scripts") pod "293f689b-eec4-4963-9036-b5fc98dcbcaa" (UID: "293f689b-eec4-4963-9036-b5fc98dcbcaa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.074575 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "293f689b-eec4-4963-9036-b5fc98dcbcaa" (UID: "293f689b-eec4-4963-9036-b5fc98dcbcaa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.084942 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-config-data" (OuterVolumeSpecName: "config-data") pod "293f689b-eec4-4963-9036-b5fc98dcbcaa" (UID: "293f689b-eec4-4963-9036-b5fc98dcbcaa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.137873 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.138175 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn5b4\" (UniqueName: \"kubernetes.io/projected/293f689b-eec4-4963-9036-b5fc98dcbcaa-kube-api-access-jn5b4\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.138239 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.138287 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/293f689b-eec4-4963-9036-b5fc98dcbcaa-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.421048 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-ld4zf" event={"ID":"293f689b-eec4-4963-9036-b5fc98dcbcaa","Type":"ContainerDied","Data":"1776dd6488a1dbee31350c2a5113ed12fb585becfa5cb6d9c792547ef35ad115"} Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.422490 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1776dd6488a1dbee31350c2a5113ed12fb585becfa5cb6d9c792547ef35ad115" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.421131 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-ld4zf" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.572304 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" path="/var/lib/kubelet/pods/34296caa-6147-4848-a0c6-2b5be70028d1/volumes" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.573197 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.573269 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.573523 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="b812e4ee-589d-42ec-b2b4-c57bb24520de" containerName="nova-scheduler-scheduler" containerID="cri-o://917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2" gracePeriod=30 Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.573827 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-log" containerID="cri-o://9b35daa687f175c3ecbdd4fc565e08e63d823d2088c43e55069f5fc4657b63d8" gracePeriod=30 Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.575642 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-api" containerID="cri-o://b39c9bfcb5b446991e94a358e7100f9f8bdc72d5b27edcef064189c1c479f1d5" gracePeriod=30 Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.584206 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.586667 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-metadata" containerID="cri-o://42d480c2e89901b1481891529e4d6d486d4d5c92de83582e336c211b11ca5e2b" gracePeriod=30 Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.586907 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-log" containerID="cri-o://923b96ea5ade69f4f276a23a0187ed4bf7bd5c52403b0345dad96e34d746d5c7" gracePeriod=30 Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.709554 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.709616 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.844631 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:53:24 crc kubenswrapper[4906]: I0227 08:53:24.845367 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.447656 4906 generic.go:334] "Generic (PLEG): container finished" podID="4996a48c-a138-408e-b5cd-531962d725f8" containerID="42d480c2e89901b1481891529e4d6d486d4d5c92de83582e336c211b11ca5e2b" exitCode=0 Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.447705 4906 generic.go:334] "Generic (PLEG): container finished" podID="4996a48c-a138-408e-b5cd-531962d725f8" containerID="923b96ea5ade69f4f276a23a0187ed4bf7bd5c52403b0345dad96e34d746d5c7" exitCode=143 Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.447745 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4996a48c-a138-408e-b5cd-531962d725f8","Type":"ContainerDied","Data":"42d480c2e89901b1481891529e4d6d486d4d5c92de83582e336c211b11ca5e2b"} Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.447817 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4996a48c-a138-408e-b5cd-531962d725f8","Type":"ContainerDied","Data":"923b96ea5ade69f4f276a23a0187ed4bf7bd5c52403b0345dad96e34d746d5c7"} Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.450578 4906 generic.go:334] "Generic (PLEG): container finished" podID="3dc624da-7d53-4588-9963-0db01ab501cf" containerID="9b35daa687f175c3ecbdd4fc565e08e63d823d2088c43e55069f5fc4657b63d8" exitCode=143 Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.450625 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3dc624da-7d53-4588-9963-0db01ab501cf","Type":"ContainerDied","Data":"9b35daa687f175c3ecbdd4fc565e08e63d823d2088c43e55069f5fc4657b63d8"} Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.776314 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.886450 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-combined-ca-bundle\") pod \"4996a48c-a138-408e-b5cd-531962d725f8\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.886504 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4996a48c-a138-408e-b5cd-531962d725f8-logs\") pod \"4996a48c-a138-408e-b5cd-531962d725f8\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.886596 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-nova-metadata-tls-certs\") pod \"4996a48c-a138-408e-b5cd-531962d725f8\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.886634 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-config-data\") pod \"4996a48c-a138-408e-b5cd-531962d725f8\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.886763 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jsrjk\" (UniqueName: \"kubernetes.io/projected/4996a48c-a138-408e-b5cd-531962d725f8-kube-api-access-jsrjk\") pod \"4996a48c-a138-408e-b5cd-531962d725f8\" (UID: \"4996a48c-a138-408e-b5cd-531962d725f8\") " Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.887100 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4996a48c-a138-408e-b5cd-531962d725f8-logs" (OuterVolumeSpecName: "logs") pod "4996a48c-a138-408e-b5cd-531962d725f8" (UID: "4996a48c-a138-408e-b5cd-531962d725f8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.897827 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4996a48c-a138-408e-b5cd-531962d725f8-kube-api-access-jsrjk" (OuterVolumeSpecName: "kube-api-access-jsrjk") pod "4996a48c-a138-408e-b5cd-531962d725f8" (UID: "4996a48c-a138-408e-b5cd-531962d725f8"). InnerVolumeSpecName "kube-api-access-jsrjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.934801 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-config-data" (OuterVolumeSpecName: "config-data") pod "4996a48c-a138-408e-b5cd-531962d725f8" (UID: "4996a48c-a138-408e-b5cd-531962d725f8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.938190 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4996a48c-a138-408e-b5cd-531962d725f8" (UID: "4996a48c-a138-408e-b5cd-531962d725f8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.972318 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "4996a48c-a138-408e-b5cd-531962d725f8" (UID: "4996a48c-a138-408e-b5cd-531962d725f8"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.990124 4906 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.990182 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.990196 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jsrjk\" (UniqueName: \"kubernetes.io/projected/4996a48c-a138-408e-b5cd-531962d725f8-kube-api-access-jsrjk\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.990210 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4996a48c-a138-408e-b5cd-531962d725f8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:25 crc kubenswrapper[4906]: I0227 08:53:25.990222 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4996a48c-a138-408e-b5cd-531962d725f8-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.463864 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"4996a48c-a138-408e-b5cd-531962d725f8","Type":"ContainerDied","Data":"203e73c785b5d9a3d065de9b4f3901776870485565ade95f8ee97d44f3d0b58b"} Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.463962 4906 scope.go:117] "RemoveContainer" containerID="42d480c2e89901b1481891529e4d6d486d4d5c92de83582e336c211b11ca5e2b" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.464142 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.473732 4906 generic.go:334] "Generic (PLEG): container finished" podID="b812e4ee-589d-42ec-b2b4-c57bb24520de" containerID="917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2" exitCode=0 Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.473789 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b812e4ee-589d-42ec-b2b4-c57bb24520de","Type":"ContainerDied","Data":"917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2"} Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.520791 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.530401 4906 scope.go:117] "RemoveContainer" containerID="923b96ea5ade69f4f276a23a0187ed4bf7bd5c52403b0345dad96e34d746d5c7" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.534717 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.573377 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4996a48c-a138-408e-b5cd-531962d725f8" path="/var/lib/kubelet/pods/4996a48c-a138-408e-b5cd-531962d725f8/volumes" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.574213 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.574706 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="293f689b-eec4-4963-9036-b5fc98dcbcaa" containerName="nova-manage" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.574728 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="293f689b-eec4-4963-9036-b5fc98dcbcaa" containerName="nova-manage" Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.574744 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-metadata" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.574751 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-metadata" Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.574768 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" containerName="dnsmasq-dns" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.574774 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" containerName="dnsmasq-dns" Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.574808 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-log" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.574814 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-log" Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.574827 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" containerName="init" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.574835 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" containerName="init" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.575060 4906 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-log" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.575080 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="293f689b-eec4-4963-9036-b5fc98dcbcaa" containerName="nova-manage" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.575090 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="4996a48c-a138-408e-b5cd-531962d725f8" containerName="nova-metadata-metadata" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.575109 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="34296caa-6147-4848-a0c6-2b5be70028d1" containerName="dnsmasq-dns" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.577120 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.582375 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.584087 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.597584 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.705551 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.705687 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-config-data\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.705795 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.705939 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d883848-7012-4c67-aad8-02d879f33ae5-logs\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.706012 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfsr6\" (UniqueName: \"kubernetes.io/projected/3d883848-7012-4c67-aad8-02d879f33ae5-kube-api-access-vfsr6\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.806200 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 
917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2 is running failed: container process not found" containerID="917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.807170 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2 is running failed: container process not found" containerID="917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.807472 4906 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2 is running failed: container process not found" containerID="917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Feb 27 08:53:26 crc kubenswrapper[4906]: E0227 08:53:26.807534 4906 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="b812e4ee-589d-42ec-b2b4-c57bb24520de" containerName="nova-scheduler-scheduler" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.808390 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d883848-7012-4c67-aad8-02d879f33ae5-logs\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.808499 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfsr6\" (UniqueName: \"kubernetes.io/projected/3d883848-7012-4c67-aad8-02d879f33ae5-kube-api-access-vfsr6\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.808607 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.808668 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-config-data\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.808716 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.809669 4906 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d883848-7012-4c67-aad8-02d879f33ae5-logs\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.814566 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.815282 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.817049 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-config-data\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.832421 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfsr6\" (UniqueName: \"kubernetes.io/projected/3d883848-7012-4c67-aad8-02d879f33ae5-kube-api-access-vfsr6\") pod \"nova-metadata-0\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " pod="openstack/nova-metadata-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.900505 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:53:26 crc kubenswrapper[4906]: I0227 08:53:26.922718 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.012943 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sgr7\" (UniqueName: \"kubernetes.io/projected/b812e4ee-589d-42ec-b2b4-c57bb24520de-kube-api-access-9sgr7\") pod \"b812e4ee-589d-42ec-b2b4-c57bb24520de\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.013048 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-config-data\") pod \"b812e4ee-589d-42ec-b2b4-c57bb24520de\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.013094 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-combined-ca-bundle\") pod \"b812e4ee-589d-42ec-b2b4-c57bb24520de\" (UID: \"b812e4ee-589d-42ec-b2b4-c57bb24520de\") " Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.082234 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-config-data" (OuterVolumeSpecName: "config-data") pod "b812e4ee-589d-42ec-b2b4-c57bb24520de" (UID: "b812e4ee-589d-42ec-b2b4-c57bb24520de"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.110783 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b812e4ee-589d-42ec-b2b4-c57bb24520de-kube-api-access-9sgr7" (OuterVolumeSpecName: "kube-api-access-9sgr7") pod "b812e4ee-589d-42ec-b2b4-c57bb24520de" (UID: "b812e4ee-589d-42ec-b2b4-c57bb24520de"). InnerVolumeSpecName "kube-api-access-9sgr7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.115083 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b812e4ee-589d-42ec-b2b4-c57bb24520de" (UID: "b812e4ee-589d-42ec-b2b4-c57bb24520de"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.116653 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sgr7\" (UniqueName: \"kubernetes.io/projected/b812e4ee-589d-42ec-b2b4-c57bb24520de-kube-api-access-9sgr7\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.116692 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.116703 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b812e4ee-589d-42ec-b2b4-c57bb24520de-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.485741 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.487032 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"b812e4ee-589d-42ec-b2b4-c57bb24520de","Type":"ContainerDied","Data":"f37901d83ec59d93cdcf25be953283ddd9155066f0fbdacac30f5dce55904d60"} Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.487043 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.487085 4906 scope.go:117] "RemoveContainer" containerID="917554ca1b9760f7773682e1b02edbbf360410c80e1a42bcc88cbd7923866fe2" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.570529 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.600758 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.654045 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:27 crc kubenswrapper[4906]: E0227 08:53:27.655493 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b812e4ee-589d-42ec-b2b4-c57bb24520de" containerName="nova-scheduler-scheduler" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.655516 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b812e4ee-589d-42ec-b2b4-c57bb24520de" containerName="nova-scheduler-scheduler" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.655923 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="b812e4ee-589d-42ec-b2b4-c57bb24520de" containerName="nova-scheduler-scheduler" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.664037 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.674317 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.765025 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.780712 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.780796 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k7hp\" (UniqueName: \"kubernetes.io/projected/d3491540-7e85-4eca-b7f2-75177b711909-kube-api-access-8k7hp\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.780924 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-config-data\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.885604 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.885726 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k7hp\" (UniqueName: 
\"kubernetes.io/projected/d3491540-7e85-4eca-b7f2-75177b711909-kube-api-access-8k7hp\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.885763 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-config-data\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.896365 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-config-data\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.911402 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:27 crc kubenswrapper[4906]: I0227 08:53:27.914052 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k7hp\" (UniqueName: \"kubernetes.io/projected/d3491540-7e85-4eca-b7f2-75177b711909-kube-api-access-8k7hp\") pod \"nova-scheduler-0\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " pod="openstack/nova-scheduler-0" Feb 27 08:53:28 crc kubenswrapper[4906]: I0227 08:53:28.099977 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:53:28 crc kubenswrapper[4906]: I0227 08:53:28.504647 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3d883848-7012-4c67-aad8-02d879f33ae5","Type":"ContainerStarted","Data":"e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4"} Feb 27 08:53:28 crc kubenswrapper[4906]: I0227 08:53:28.505188 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3d883848-7012-4c67-aad8-02d879f33ae5","Type":"ContainerStarted","Data":"5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7"} Feb 27 08:53:28 crc kubenswrapper[4906]: I0227 08:53:28.505206 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3d883848-7012-4c67-aad8-02d879f33ae5","Type":"ContainerStarted","Data":"4c9b02090932710bbff1cc3396ca96729e098f7cd2b614c911b6c57b2c3a9333"} Feb 27 08:53:28 crc kubenswrapper[4906]: I0227 08:53:28.538937 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.53890352 podStartE2EDuration="2.53890352s" podCreationTimestamp="2026-02-27 08:53:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:28.531327981 +0000 UTC m=+1506.925729591" watchObservedRunningTime="2026-02-27 08:53:28.53890352 +0000 UTC m=+1506.933305140" Feb 27 08:53:28 crc kubenswrapper[4906]: I0227 08:53:28.564997 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b812e4ee-589d-42ec-b2b4-c57bb24520de" path="/var/lib/kubelet/pods/b812e4ee-589d-42ec-b2b4-c57bb24520de/volumes" Feb 27 08:53:28 crc 
kubenswrapper[4906]: I0227 08:53:28.590073 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.518367 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d3491540-7e85-4eca-b7f2-75177b711909","Type":"ContainerStarted","Data":"4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9"} Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.519159 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d3491540-7e85-4eca-b7f2-75177b711909","Type":"ContainerStarted","Data":"66408c5f453811e5d3178a5a186640ad65afe1ce0913e8b14fb1b7e94baa64ea"} Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.524903 4906 generic.go:334] "Generic (PLEG): container finished" podID="3dc624da-7d53-4588-9963-0db01ab501cf" containerID="b39c9bfcb5b446991e94a358e7100f9f8bdc72d5b27edcef064189c1c479f1d5" exitCode=0 Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.524918 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3dc624da-7d53-4588-9963-0db01ab501cf","Type":"ContainerDied","Data":"b39c9bfcb5b446991e94a358e7100f9f8bdc72d5b27edcef064189c1c479f1d5"} Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.524993 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"3dc624da-7d53-4588-9963-0db01ab501cf","Type":"ContainerDied","Data":"22ac00e28b795db1cae47fb060a3bf77aeaaa8237218023d584272640e7d93f1"} Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.525009 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="22ac00e28b795db1cae47fb060a3bf77aeaaa8237218023d584272640e7d93f1" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.532325 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.548025 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.547996722 podStartE2EDuration="2.547996722s" podCreationTimestamp="2026-02-27 08:53:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:29.541912852 +0000 UTC m=+1507.936314462" watchObservedRunningTime="2026-02-27 08:53:29.547996722 +0000 UTC m=+1507.942398332" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.630292 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-combined-ca-bundle\") pod \"3dc624da-7d53-4588-9963-0db01ab501cf\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.630348 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-config-data\") pod \"3dc624da-7d53-4588-9963-0db01ab501cf\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.630370 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc624da-7d53-4588-9963-0db01ab501cf-logs\") pod \"3dc624da-7d53-4588-9963-0db01ab501cf\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.630549 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp6dn\" (UniqueName: \"kubernetes.io/projected/3dc624da-7d53-4588-9963-0db01ab501cf-kube-api-access-gp6dn\") pod \"3dc624da-7d53-4588-9963-0db01ab501cf\" (UID: \"3dc624da-7d53-4588-9963-0db01ab501cf\") " Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.632070 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dc624da-7d53-4588-9963-0db01ab501cf-logs" (OuterVolumeSpecName: "logs") pod "3dc624da-7d53-4588-9963-0db01ab501cf" (UID: "3dc624da-7d53-4588-9963-0db01ab501cf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.633101 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3dc624da-7d53-4588-9963-0db01ab501cf-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.637742 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dc624da-7d53-4588-9963-0db01ab501cf-kube-api-access-gp6dn" (OuterVolumeSpecName: "kube-api-access-gp6dn") pod "3dc624da-7d53-4588-9963-0db01ab501cf" (UID: "3dc624da-7d53-4588-9963-0db01ab501cf"). InnerVolumeSpecName "kube-api-access-gp6dn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.671427 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-config-data" (OuterVolumeSpecName: "config-data") pod "3dc624da-7d53-4588-9963-0db01ab501cf" (UID: "3dc624da-7d53-4588-9963-0db01ab501cf"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.680096 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3dc624da-7d53-4588-9963-0db01ab501cf" (UID: "3dc624da-7d53-4588-9963-0db01ab501cf"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.736503 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp6dn\" (UniqueName: \"kubernetes.io/projected/3dc624da-7d53-4588-9963-0db01ab501cf-kube-api-access-gp6dn\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.736565 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:29 crc kubenswrapper[4906]: I0227 08:53:29.736588 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dc624da-7d53-4588-9963-0db01ab501cf-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.290261 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.536128 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.586175 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.604421 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.616254 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:30 crc kubenswrapper[4906]: E0227 08:53:30.616978 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-api" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.617004 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-api" Feb 27 08:53:30 crc kubenswrapper[4906]: E0227 08:53:30.617022 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-log" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.617031 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-log" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.617270 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-log" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.617292 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" containerName="nova-api-api" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.618565 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.622412 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.633772 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.757775 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-config-data\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.757864 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.757951 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-logs\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.758047 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxvjm\" (UniqueName: \"kubernetes.io/projected/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-kube-api-access-jxvjm\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.862482 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxvjm\" (UniqueName: \"kubernetes.io/projected/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-kube-api-access-jxvjm\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.862603 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-config-data\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.862640 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.862672 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-logs\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.863208 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-logs\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " 
pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.873004 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.878621 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-config-data\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.890112 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxvjm\" (UniqueName: \"kubernetes.io/projected/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-kube-api-access-jxvjm\") pod \"nova-api-0\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " pod="openstack/nova-api-0" Feb 27 08:53:30 crc kubenswrapper[4906]: I0227 08:53:30.945851 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:31 crc kubenswrapper[4906]: I0227 08:53:31.457117 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:31 crc kubenswrapper[4906]: I0227 08:53:31.552299 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292","Type":"ContainerStarted","Data":"7286ed2578c7e55e85c293f0e0ba876029696aea6f368d7c43ae85e4243da7ef"} Feb 27 08:53:31 crc kubenswrapper[4906]: I0227 08:53:31.927310 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 27 08:53:31 crc kubenswrapper[4906]: I0227 08:53:31.929126 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 27 08:53:32 crc kubenswrapper[4906]: I0227 08:53:32.568036 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3dc624da-7d53-4588-9963-0db01ab501cf" path="/var/lib/kubelet/pods/3dc624da-7d53-4588-9963-0db01ab501cf/volumes" Feb 27 08:53:32 crc kubenswrapper[4906]: I0227 08:53:32.572422 4906 generic.go:334] "Generic (PLEG): container finished" podID="bcd22869-73ce-4c75-8628-2bf971da33d5" containerID="155a4e047dac2c401b8f299eb631cf5e030fa9aa5a6ce35a4e8ed5a7b5c28f94" exitCode=0 Feb 27 08:53:32 crc kubenswrapper[4906]: I0227 08:53:32.572486 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-n9r82" event={"ID":"bcd22869-73ce-4c75-8628-2bf971da33d5","Type":"ContainerDied","Data":"155a4e047dac2c401b8f299eb631cf5e030fa9aa5a6ce35a4e8ed5a7b5c28f94"} Feb 27 08:53:32 crc kubenswrapper[4906]: I0227 08:53:32.576245 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292","Type":"ContainerStarted","Data":"cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639"} Feb 27 08:53:32 crc kubenswrapper[4906]: I0227 08:53:32.576306 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292","Type":"ContainerStarted","Data":"1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a"} Feb 27 08:53:32 crc kubenswrapper[4906]: I0227 08:53:32.654651 4906 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack/nova-api-0" podStartSLOduration=2.654621231 podStartE2EDuration="2.654621231s" podCreationTimestamp="2026-02-27 08:53:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:32.642831681 +0000 UTC m=+1511.037233291" watchObservedRunningTime="2026-02-27 08:53:32.654621231 +0000 UTC m=+1511.049022841" Feb 27 08:53:33 crc kubenswrapper[4906]: I0227 08:53:33.100803 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.019187 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.047507 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-scripts\") pod \"bcd22869-73ce-4c75-8628-2bf971da33d5\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.047642 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-config-data\") pod \"bcd22869-73ce-4c75-8628-2bf971da33d5\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.068484 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-scripts" (OuterVolumeSpecName: "scripts") pod "bcd22869-73ce-4c75-8628-2bf971da33d5" (UID: "bcd22869-73ce-4c75-8628-2bf971da33d5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.082897 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-config-data" (OuterVolumeSpecName: "config-data") pod "bcd22869-73ce-4c75-8628-2bf971da33d5" (UID: "bcd22869-73ce-4c75-8628-2bf971da33d5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.149219 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdg6w\" (UniqueName: \"kubernetes.io/projected/bcd22869-73ce-4c75-8628-2bf971da33d5-kube-api-access-gdg6w\") pod \"bcd22869-73ce-4c75-8628-2bf971da33d5\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.149706 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-combined-ca-bundle\") pod \"bcd22869-73ce-4c75-8628-2bf971da33d5\" (UID: \"bcd22869-73ce-4c75-8628-2bf971da33d5\") " Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.150226 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.150248 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.153152 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcd22869-73ce-4c75-8628-2bf971da33d5-kube-api-access-gdg6w" (OuterVolumeSpecName: "kube-api-access-gdg6w") pod "bcd22869-73ce-4c75-8628-2bf971da33d5" (UID: "bcd22869-73ce-4c75-8628-2bf971da33d5"). InnerVolumeSpecName "kube-api-access-gdg6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.178139 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bcd22869-73ce-4c75-8628-2bf971da33d5" (UID: "bcd22869-73ce-4c75-8628-2bf971da33d5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.252263 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcd22869-73ce-4c75-8628-2bf971da33d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.252306 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdg6w\" (UniqueName: \"kubernetes.io/projected/bcd22869-73ce-4c75-8628-2bf971da33d5-kube-api-access-gdg6w\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.501262 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.501581 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" containerName="kube-state-metrics" containerID="cri-o://5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be" gracePeriod=30 Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.606354 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-n9r82" event={"ID":"bcd22869-73ce-4c75-8628-2bf971da33d5","Type":"ContainerDied","Data":"dac167bb6d2d4d90d91364a9501f09e9691b4a3dfccc5b497d87d134dd65eb64"} Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.606418 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dac167bb6d2d4d90d91364a9501f09e9691b4a3dfccc5b497d87d134dd65eb64" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.606499 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-n9r82" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.697098 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 27 08:53:34 crc kubenswrapper[4906]: E0227 08:53:34.697557 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcd22869-73ce-4c75-8628-2bf971da33d5" containerName="nova-cell1-conductor-db-sync" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.697585 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcd22869-73ce-4c75-8628-2bf971da33d5" containerName="nova-cell1-conductor-db-sync" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.697806 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcd22869-73ce-4c75-8628-2bf971da33d5" containerName="nova-cell1-conductor-db-sync" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.698594 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.705142 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.735514 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.867963 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/389f79ec-91c1-40e2-9076-771f9eacb628-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.868026 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/389f79ec-91c1-40e2-9076-771f9eacb628-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.868074 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wcf8j\" (UniqueName: \"kubernetes.io/projected/389f79ec-91c1-40e2-9076-771f9eacb628-kube-api-access-wcf8j\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.982279 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/389f79ec-91c1-40e2-9076-771f9eacb628-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.982352 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/389f79ec-91c1-40e2-9076-771f9eacb628-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.982417 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wcf8j\" (UniqueName: \"kubernetes.io/projected/389f79ec-91c1-40e2-9076-771f9eacb628-kube-api-access-wcf8j\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.990141 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/389f79ec-91c1-40e2-9076-771f9eacb628-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:34 crc kubenswrapper[4906]: I0227 08:53:34.990575 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/389f79ec-91c1-40e2-9076-771f9eacb628-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.001664 4906 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wcf8j\" (UniqueName: \"kubernetes.io/projected/389f79ec-91c1-40e2-9076-771f9eacb628-kube-api-access-wcf8j\") pod \"nova-cell1-conductor-0\" (UID: \"389f79ec-91c1-40e2-9076-771f9eacb628\") " pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.028110 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.144366 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.187401 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtpf6\" (UniqueName: \"kubernetes.io/projected/eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5-kube-api-access-rtpf6\") pod \"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5\" (UID: \"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5\") " Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.213088 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5-kube-api-access-rtpf6" (OuterVolumeSpecName: "kube-api-access-rtpf6") pod "eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" (UID: "eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5"). InnerVolumeSpecName "kube-api-access-rtpf6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.292334 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtpf6\" (UniqueName: \"kubernetes.io/projected/eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5-kube-api-access-rtpf6\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.599790 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.642485 4906 generic.go:334] "Generic (PLEG): container finished" podID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" containerID="5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be" exitCode=2 Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.642588 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5","Type":"ContainerDied","Data":"5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be"} Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.642624 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5","Type":"ContainerDied","Data":"443ffa2210eed630941173740dbf758c1635870cb8897a6823b716415a1ca308"} Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.642643 4906 scope.go:117] "RemoveContainer" containerID="5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.642840 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.659023 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"389f79ec-91c1-40e2-9076-771f9eacb628","Type":"ContainerStarted","Data":"0d0e2e798153011fae9a055679224b0a2960dc2ed8189979079d79b46f7a6ca2"} Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.703979 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.726946 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.752796 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:53:35 crc kubenswrapper[4906]: E0227 08:53:35.753843 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" containerName="kube-state-metrics" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.753870 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" containerName="kube-state-metrics" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.754263 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" containerName="kube-state-metrics" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.755298 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.761850 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.764082 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.782578 4906 scope.go:117] "RemoveContainer" containerID="5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be" Feb 27 08:53:35 crc kubenswrapper[4906]: E0227 08:53:35.784052 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be\": container with ID starting with 5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be not found: ID does not exist" containerID="5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.784096 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be"} err="failed to get container status \"5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be\": rpc error: code = NotFound desc = could not find container \"5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be\": container with ID starting with 5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be not found: ID does not exist" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.802546 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: 
\"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.802610 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86z77\" (UniqueName: \"kubernetes.io/projected/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-api-access-86z77\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.802641 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.802670 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.831361 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.904201 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.904271 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86z77\" (UniqueName: \"kubernetes.io/projected/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-api-access-86z77\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.904300 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.904328 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.909122 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.909613 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.910200 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:35 crc kubenswrapper[4906]: I0227 08:53:35.924474 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86z77\" (UniqueName: \"kubernetes.io/projected/5f98d1e1-977f-4e20-86c4-e4e580c01f54-kube-api-access-86z77\") pod \"kube-state-metrics-0\" (UID: \"5f98d1e1-977f-4e20-86c4-e4e580c01f54\") " pod="openstack/kube-state-metrics-0" Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.119466 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.567192 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5" path="/var/lib/kubelet/pods/eeb64c72-5cc6-450f-a67a-11bf4b2bd7b5/volumes" Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.654700 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.679997 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f98d1e1-977f-4e20-86c4-e4e580c01f54","Type":"ContainerStarted","Data":"7650e663477735f0fffe7c09319bf181ada0cb075ab664aac425b29ef7792ed1"} Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.682710 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"389f79ec-91c1-40e2-9076-771f9eacb628","Type":"ContainerStarted","Data":"254e49d8318309923f316f791d59ba4eb2ec628064205529750eb971e8f24f6f"} Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.682865 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.707216 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.707186144 podStartE2EDuration="2.707186144s" podCreationTimestamp="2026-02-27 08:53:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:36.698731862 +0000 UTC m=+1515.093133472" watchObservedRunningTime="2026-02-27 08:53:36.707186144 +0000 UTC m=+1515.101587754" Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.923756 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.923829 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.936112 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.936506 4906 kuberuntime_container.go:808] "Killing container with a 
grace period" pod="openstack/ceilometer-0" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-central-agent" containerID="cri-o://3307850c4df159782d3a7188196431a19b5bfe90f5166364ffa7f7ede4e2590b" gracePeriod=30 Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.936587 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="proxy-httpd" containerID="cri-o://3f76341795542d0ee376b586f5bf8ca9f81794a44a9c3b1d4460fb9a09cea1d4" gracePeriod=30 Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.936680 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-notification-agent" containerID="cri-o://515fac5d741589707eca3835cea9e9ca69f06d6e8c7f76d4269ffc25dff37984" gracePeriod=30 Feb 27 08:53:36 crc kubenswrapper[4906]: I0227 08:53:36.936612 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="sg-core" containerID="cri-o://f71c93a50b50e04b0ad4f47ed7393383ea2dd4f80cffad8c957b77fa5312fbb1" gracePeriod=30 Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.699523 4906 generic.go:334] "Generic (PLEG): container finished" podID="e4e52025-428e-46db-950c-791f38cf2895" containerID="3f76341795542d0ee376b586f5bf8ca9f81794a44a9c3b1d4460fb9a09cea1d4" exitCode=0 Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.700063 4906 generic.go:334] "Generic (PLEG): container finished" podID="e4e52025-428e-46db-950c-791f38cf2895" containerID="f71c93a50b50e04b0ad4f47ed7393383ea2dd4f80cffad8c957b77fa5312fbb1" exitCode=2 Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.700076 4906 generic.go:334] "Generic (PLEG): container finished" podID="e4e52025-428e-46db-950c-791f38cf2895" containerID="3307850c4df159782d3a7188196431a19b5bfe90f5166364ffa7f7ede4e2590b" exitCode=0 Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.699589 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerDied","Data":"3f76341795542d0ee376b586f5bf8ca9f81794a44a9c3b1d4460fb9a09cea1d4"} Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.700171 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerDied","Data":"f71c93a50b50e04b0ad4f47ed7393383ea2dd4f80cffad8c957b77fa5312fbb1"} Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.700187 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerDied","Data":"3307850c4df159782d3a7188196431a19b5bfe90f5166364ffa7f7ede4e2590b"} Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.937125 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 27 08:53:37 crc kubenswrapper[4906]: I0227 08:53:37.937221 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-log" 
probeResult="failure" output="Get \"https://10.217.0.199:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 27 08:53:38 crc kubenswrapper[4906]: I0227 08:53:38.100687 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 27 08:53:38 crc kubenswrapper[4906]: I0227 08:53:38.175708 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 27 08:53:38 crc kubenswrapper[4906]: I0227 08:53:38.714340 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5f98d1e1-977f-4e20-86c4-e4e580c01f54","Type":"ContainerStarted","Data":"082e973f5a60e28a6dc49c33622bd7d704f4f689bf5bcf0cbf19f9b5054e51ad"} Feb 27 08:53:38 crc kubenswrapper[4906]: I0227 08:53:38.737397 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.139301042 podStartE2EDuration="3.737369523s" podCreationTimestamp="2026-02-27 08:53:35 +0000 UTC" firstStartedPulling="2026-02-27 08:53:36.670687845 +0000 UTC m=+1515.065089445" lastFinishedPulling="2026-02-27 08:53:38.268756306 +0000 UTC m=+1516.663157926" observedRunningTime="2026-02-27 08:53:38.731434057 +0000 UTC m=+1517.125835667" watchObservedRunningTime="2026-02-27 08:53:38.737369523 +0000 UTC m=+1517.131771133" Feb 27 08:53:38 crc kubenswrapper[4906]: I0227 08:53:38.760022 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 27 08:53:39 crc kubenswrapper[4906]: I0227 08:53:39.746696 4906 generic.go:334] "Generic (PLEG): container finished" podID="e4e52025-428e-46db-950c-791f38cf2895" containerID="515fac5d741589707eca3835cea9e9ca69f06d6e8c7f76d4269ffc25dff37984" exitCode=0 Feb 27 08:53:39 crc kubenswrapper[4906]: I0227 08:53:39.748853 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerDied","Data":"515fac5d741589707eca3835cea9e9ca69f06d6e8c7f76d4269ffc25dff37984"} Feb 27 08:53:39 crc kubenswrapper[4906]: I0227 08:53:39.748941 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Feb 27 08:53:39 crc kubenswrapper[4906]: I0227 08:53:39.935751 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.073303 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgsw4\" (UniqueName: \"kubernetes.io/projected/e4e52025-428e-46db-950c-791f38cf2895-kube-api-access-pgsw4\") pod \"e4e52025-428e-46db-950c-791f38cf2895\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.073757 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-log-httpd\") pod \"e4e52025-428e-46db-950c-791f38cf2895\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.073786 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-combined-ca-bundle\") pod \"e4e52025-428e-46db-950c-791f38cf2895\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.073912 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-config-data\") pod \"e4e52025-428e-46db-950c-791f38cf2895\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.073977 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-run-httpd\") pod \"e4e52025-428e-46db-950c-791f38cf2895\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.074161 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-sg-core-conf-yaml\") pod \"e4e52025-428e-46db-950c-791f38cf2895\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.074220 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-scripts\") pod \"e4e52025-428e-46db-950c-791f38cf2895\" (UID: \"e4e52025-428e-46db-950c-791f38cf2895\") " Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.074318 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e4e52025-428e-46db-950c-791f38cf2895" (UID: "e4e52025-428e-46db-950c-791f38cf2895"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.074485 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e4e52025-428e-46db-950c-791f38cf2895" (UID: "e4e52025-428e-46db-950c-791f38cf2895"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.075958 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.075988 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e4e52025-428e-46db-950c-791f38cf2895-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.099312 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4e52025-428e-46db-950c-791f38cf2895-kube-api-access-pgsw4" (OuterVolumeSpecName: "kube-api-access-pgsw4") pod "e4e52025-428e-46db-950c-791f38cf2895" (UID: "e4e52025-428e-46db-950c-791f38cf2895"). InnerVolumeSpecName "kube-api-access-pgsw4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.108278 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-scripts" (OuterVolumeSpecName: "scripts") pod "e4e52025-428e-46db-950c-791f38cf2895" (UID: "e4e52025-428e-46db-950c-791f38cf2895"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.114446 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e4e52025-428e-46db-950c-791f38cf2895" (UID: "e4e52025-428e-46db-950c-791f38cf2895"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.170703 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e4e52025-428e-46db-950c-791f38cf2895" (UID: "e4e52025-428e-46db-950c-791f38cf2895"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.186190 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.186252 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.186267 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgsw4\" (UniqueName: \"kubernetes.io/projected/e4e52025-428e-46db-950c-791f38cf2895-kube-api-access-pgsw4\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.186295 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.200535 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-config-data" (OuterVolumeSpecName: "config-data") pod "e4e52025-428e-46db-950c-791f38cf2895" (UID: "e4e52025-428e-46db-950c-791f38cf2895"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.288226 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e4e52025-428e-46db-950c-791f38cf2895-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.760667 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e4e52025-428e-46db-950c-791f38cf2895","Type":"ContainerDied","Data":"a2e4e8b23dd8cc370e767a253ce6020795527a25909dc80ff910cf5191f85699"} Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.760707 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.760747 4906 scope.go:117] "RemoveContainer" containerID="3f76341795542d0ee376b586f5bf8ca9f81794a44a9c3b1d4460fb9a09cea1d4" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.798104 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.811510 4906 scope.go:117] "RemoveContainer" containerID="f71c93a50b50e04b0ad4f47ed7393383ea2dd4f80cffad8c957b77fa5312fbb1" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.818047 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.841834 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:40 crc kubenswrapper[4906]: E0227 08:53:40.842929 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-central-agent" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.843022 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-central-agent" Feb 27 08:53:40 crc kubenswrapper[4906]: E0227 08:53:40.843149 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="proxy-httpd" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.843229 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="proxy-httpd" Feb 27 08:53:40 crc kubenswrapper[4906]: E0227 08:53:40.843337 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="sg-core" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.843389 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="sg-core" Feb 27 08:53:40 crc kubenswrapper[4906]: E0227 08:53:40.843514 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-notification-agent" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.843597 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-notification-agent" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.843940 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="proxy-httpd" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.844239 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-central-agent" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.849305 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="ceilometer-notification-agent" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.849413 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4e52025-428e-46db-950c-791f38cf2895" containerName="sg-core" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.851573 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.853639 4906 scope.go:117] "RemoveContainer" containerID="515fac5d741589707eca3835cea9e9ca69f06d6e8c7f76d4269ffc25dff37984" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.854610 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.854775 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.855630 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.879517 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.926040 4906 scope.go:117] "RemoveContainer" containerID="3307850c4df159782d3a7188196431a19b5bfe90f5166364ffa7f7ede4e2590b" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.947468 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:53:40 crc kubenswrapper[4906]: I0227 08:53:40.947529 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004278 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004376 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-config-data\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004426 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004443 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004469 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-scripts\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004560 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-log-httpd\") pod \"ceilometer-0\" 
(UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004670 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-run-httpd\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.004698 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kc8q\" (UniqueName: \"kubernetes.io/projected/19bc2263-4fae-48d4-b3f2-9da97ecec904-kube-api-access-2kc8q\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106520 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-run-httpd\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106571 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kc8q\" (UniqueName: \"kubernetes.io/projected/19bc2263-4fae-48d4-b3f2-9da97ecec904-kube-api-access-2kc8q\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106647 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106709 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-config-data\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106763 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106779 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106802 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-scripts\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.106854 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-log-httpd\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.107361 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-run-httpd\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.107674 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-log-httpd\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.111415 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.112098 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-config-data\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.112835 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.114044 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-scripts\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.114943 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.126741 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kc8q\" (UniqueName: \"kubernetes.io/projected/19bc2263-4fae-48d4-b3f2-9da97ecec904-kube-api-access-2kc8q\") pod \"ceilometer-0\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.205935 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.712202 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:41 crc kubenswrapper[4906]: I0227 08:53:41.771694 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerStarted","Data":"50b063e61ea80f533550a06b42c606a7b17ce853f99558c0cfe2500d898cab24"} Feb 27 08:53:42 crc kubenswrapper[4906]: I0227 08:53:42.030122 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.201:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 27 08:53:42 crc kubenswrapper[4906]: I0227 08:53:42.030389 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.201:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 27 08:53:42 crc kubenswrapper[4906]: I0227 08:53:42.571437 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4e52025-428e-46db-950c-791f38cf2895" path="/var/lib/kubelet/pods/e4e52025-428e-46db-950c-791f38cf2895/volumes" Feb 27 08:53:42 crc kubenswrapper[4906]: I0227 08:53:42.784348 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerStarted","Data":"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1"} Feb 27 08:53:44 crc kubenswrapper[4906]: I0227 08:53:44.849125 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerStarted","Data":"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9"} Feb 27 08:53:45 crc kubenswrapper[4906]: I0227 08:53:45.062704 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Feb 27 08:53:45 crc kubenswrapper[4906]: I0227 08:53:45.863036 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerStarted","Data":"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8"} Feb 27 08:53:46 crc kubenswrapper[4906]: I0227 08:53:46.143003 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Feb 27 08:53:46 crc kubenswrapper[4906]: I0227 08:53:46.932710 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 27 08:53:46 crc kubenswrapper[4906]: I0227 08:53:46.942686 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 27 08:53:46 crc kubenswrapper[4906]: I0227 08:53:46.949945 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 27 08:53:47 crc kubenswrapper[4906]: E0227 08:53:47.602651 4906 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeeb64c72_5cc6_450f_a67a_11bf4b2bd7b5.slice/crio-conmon-5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcd22869_73ce_4c75_8628_2bf971da33d5.slice/crio-155a4e047dac2c401b8f299eb631cf5e030fa9aa5a6ce35a4e8ed5a7b5c28f94.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcd22869_73ce_4c75_8628_2bf971da33d5.slice/crio-dac167bb6d2d4d90d91364a9501f09e9691b4a3dfccc5b497d87d134dd65eb64\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcd22869_73ce_4c75_8628_2bf971da33d5.slice/crio-conmon-155a4e047dac2c401b8f299eb631cf5e030fa9aa5a6ce35a4e8ed5a7b5c28f94.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-3f76341795542d0ee376b586f5bf8ca9f81794a44a9c3b1d4460fb9a09cea1d4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-f71c93a50b50e04b0ad4f47ed7393383ea2dd4f80cffad8c957b77fa5312fbb1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeeb64c72_5cc6_450f_a67a_11bf4b2bd7b5.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeeb64c72_5cc6_450f_a67a_11bf4b2bd7b5.slice/crio-443ffa2210eed630941173740dbf758c1635870cb8897a6823b716415a1ca308\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-conmon-3307850c4df159782d3a7188196431a19b5bfe90f5166364ffa7f7ede4e2590b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-515fac5d741589707eca3835cea9e9ca69f06d6e8c7f76d4269ffc25dff37984.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-conmon-3f76341795542d0ee376b586f5bf8ca9f81794a44a9c3b1d4460fb9a09cea1d4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-3307850c4df159782d3a7188196431a19b5bfe90f5166364ffa7f7ede4e2590b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeeb64c72_5cc6_450f_a67a_11bf4b2bd7b5.slice/crio-5df7b001615b1d13e96400ca31097b7190334a301e861f5098d45364711254be.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf12ff417_46e1_4950_9e28_9a41afadd152.slice/crio-056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055.scope\": RecentStats: unable to find data in memory cache], 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcd22869_73ce_4c75_8628_2bf971da33d5.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-conmon-f71c93a50b50e04b0ad4f47ed7393383ea2dd4f80cffad8c957b77fa5312fbb1.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf12ff417_46e1_4950_9e28_9a41afadd152.slice/crio-conmon-056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-a2e4e8b23dd8cc370e767a253ce6020795527a25909dc80ff910cf5191f85699\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4e52025_428e_46db_950c_791f38cf2895.slice/crio-conmon-515fac5d741589707eca3835cea9e9ca69f06d6e8c7f76d4269ffc25dff37984.scope\": RecentStats: unable to find data in memory cache]" Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.845889 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.910044 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-combined-ca-bundle\") pod \"f12ff417-46e1-4950-9e28-9a41afadd152\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.910654 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j958f\" (UniqueName: \"kubernetes.io/projected/f12ff417-46e1-4950-9e28-9a41afadd152-kube-api-access-j958f\") pod \"f12ff417-46e1-4950-9e28-9a41afadd152\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.910758 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-config-data\") pod \"f12ff417-46e1-4950-9e28-9a41afadd152\" (UID: \"f12ff417-46e1-4950-9e28-9a41afadd152\") " Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.938071 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f12ff417-46e1-4950-9e28-9a41afadd152-kube-api-access-j958f" (OuterVolumeSpecName: "kube-api-access-j958f") pod "f12ff417-46e1-4950-9e28-9a41afadd152" (UID: "f12ff417-46e1-4950-9e28-9a41afadd152"). InnerVolumeSpecName "kube-api-access-j958f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.942897 4906 generic.go:334] "Generic (PLEG): container finished" podID="f12ff417-46e1-4950-9e28-9a41afadd152" containerID="056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055" exitCode=137 Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.944854 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.948001 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f12ff417-46e1-4950-9e28-9a41afadd152","Type":"ContainerDied","Data":"056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055"} Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.948080 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"f12ff417-46e1-4950-9e28-9a41afadd152","Type":"ContainerDied","Data":"d5b493d88fb6d0ce2397e784109c432ebaa69e095d4485cdeccae412b2830225"} Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.948099 4906 scope.go:117] "RemoveContainer" containerID="056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055" Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.962290 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-config-data" (OuterVolumeSpecName: "config-data") pod "f12ff417-46e1-4950-9e28-9a41afadd152" (UID: "f12ff417-46e1-4950-9e28-9a41afadd152"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:47 crc kubenswrapper[4906]: I0227 08:53:47.977009 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f12ff417-46e1-4950-9e28-9a41afadd152" (UID: "f12ff417-46e1-4950-9e28-9a41afadd152"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.002370 4906 scope.go:117] "RemoveContainer" containerID="056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.002391 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 27 08:53:48 crc kubenswrapper[4906]: E0227 08:53:48.003529 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055\": container with ID starting with 056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055 not found: ID does not exist" containerID="056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.003607 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055"} err="failed to get container status \"056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055\": rpc error: code = NotFound desc = could not find container \"056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055\": container with ID starting with 056ca9574bf8e8dd58188f5a3c93e0ff05838ff85ef86b5af5672bea195c4055 not found: ID does not exist" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.012165 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j958f\" (UniqueName: \"kubernetes.io/projected/f12ff417-46e1-4950-9e28-9a41afadd152-kube-api-access-j958f\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.012202 4906 reconciler_common.go:293] "Volume detached for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.012211 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f12ff417-46e1-4950-9e28-9a41afadd152-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.300098 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.311998 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.351719 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:48 crc kubenswrapper[4906]: E0227 08:53:48.352174 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f12ff417-46e1-4950-9e28-9a41afadd152" containerName="nova-cell1-novncproxy-novncproxy" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.352189 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f12ff417-46e1-4950-9e28-9a41afadd152" containerName="nova-cell1-novncproxy-novncproxy" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.352388 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f12ff417-46e1-4950-9e28-9a41afadd152" containerName="nova-cell1-novncproxy-novncproxy" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.353051 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.358363 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.358438 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.358574 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.376071 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.419133 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.419391 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nb8w5\" (UniqueName: \"kubernetes.io/projected/460d94e6-b4e6-4248-9191-a5930f468875-kube-api-access-nb8w5\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.419484 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" 
(UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.419520 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.419561 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.521957 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nb8w5\" (UniqueName: \"kubernetes.io/projected/460d94e6-b4e6-4248-9191-a5930f468875-kube-api-access-nb8w5\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.522090 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.522128 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.522162 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.522226 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.533014 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.533014 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " 
pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.533497 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.533800 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/460d94e6-b4e6-4248-9191-a5930f468875-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.549706 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nb8w5\" (UniqueName: \"kubernetes.io/projected/460d94e6-b4e6-4248-9191-a5930f468875-kube-api-access-nb8w5\") pod \"nova-cell1-novncproxy-0\" (UID: \"460d94e6-b4e6-4248-9191-a5930f468875\") " pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.568318 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f12ff417-46e1-4950-9e28-9a41afadd152" path="/var/lib/kubelet/pods/f12ff417-46e1-4950-9e28-9a41afadd152/volumes" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.759559 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.971716 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerStarted","Data":"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8"} Feb 27 08:53:48 crc kubenswrapper[4906]: I0227 08:53:48.972060 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 27 08:53:49 crc kubenswrapper[4906]: I0227 08:53:49.013135 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.188081694 podStartE2EDuration="9.013101246s" podCreationTimestamp="2026-02-27 08:53:40 +0000 UTC" firstStartedPulling="2026-02-27 08:53:41.719069452 +0000 UTC m=+1520.113471062" lastFinishedPulling="2026-02-27 08:53:47.544089004 +0000 UTC m=+1525.938490614" observedRunningTime="2026-02-27 08:53:49.004084959 +0000 UTC m=+1527.398486569" watchObservedRunningTime="2026-02-27 08:53:49.013101246 +0000 UTC m=+1527.407502866" Feb 27 08:53:49 crc kubenswrapper[4906]: I0227 08:53:49.273199 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Feb 27 08:53:49 crc kubenswrapper[4906]: W0227 08:53:49.277142 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod460d94e6_b4e6_4248_9191_a5930f468875.slice/crio-56587d8bdc37c394a86acf13977e901f7df69df1f8d116d82a24a083ce96269d WatchSource:0}: Error finding container 56587d8bdc37c394a86acf13977e901f7df69df1f8d116d82a24a083ce96269d: Status 404 returned error can't find the container with id 56587d8bdc37c394a86acf13977e901f7df69df1f8d116d82a24a083ce96269d Feb 27 08:53:49 crc kubenswrapper[4906]: I0227 08:53:49.984764 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" 
event={"ID":"460d94e6-b4e6-4248-9191-a5930f468875","Type":"ContainerStarted","Data":"4b0ad7a3e3bf851be965dbce5e389e1a49e755f08a2bd0a3962eece721ecbdfb"} Feb 27 08:53:49 crc kubenswrapper[4906]: I0227 08:53:49.986272 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"460d94e6-b4e6-4248-9191-a5930f468875","Type":"ContainerStarted","Data":"56587d8bdc37c394a86acf13977e901f7df69df1f8d116d82a24a083ce96269d"} Feb 27 08:53:50 crc kubenswrapper[4906]: I0227 08:53:50.009570 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.009545315 podStartE2EDuration="2.009545315s" podCreationTimestamp="2026-02-27 08:53:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:50.006601318 +0000 UTC m=+1528.401002938" watchObservedRunningTime="2026-02-27 08:53:50.009545315 +0000 UTC m=+1528.403946925" Feb 27 08:53:50 crc kubenswrapper[4906]: I0227 08:53:50.951483 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 27 08:53:50 crc kubenswrapper[4906]: I0227 08:53:50.952798 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 27 08:53:50 crc kubenswrapper[4906]: I0227 08:53:50.958318 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 27 08:53:50 crc kubenswrapper[4906]: I0227 08:53:50.969527 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.008373 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.026582 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.233970 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85649f948c-rhmfs"] Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.236264 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.260531 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85649f948c-rhmfs"] Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.321536 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-sb\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.321597 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22ppv\" (UniqueName: \"kubernetes.io/projected/77e1c833-7b25-4d3d-bd7a-d24f619d4966-kube-api-access-22ppv\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.321668 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-config\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.321707 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-svc\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.321794 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-nb\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.321830 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-swift-storage-0\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.423385 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-sb\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.423451 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22ppv\" (UniqueName: \"kubernetes.io/projected/77e1c833-7b25-4d3d-bd7a-d24f619d4966-kube-api-access-22ppv\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.423506 4906 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-config\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.423538 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-svc\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.423609 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-nb\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.423645 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-swift-storage-0\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.424617 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-sb\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.424664 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-svc\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.424637 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-config\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.424751 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-swift-storage-0\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.425478 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-nb\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.465828 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22ppv\" (UniqueName: 
\"kubernetes.io/projected/77e1c833-7b25-4d3d-bd7a-d24f619d4966-kube-api-access-22ppv\") pod \"dnsmasq-dns-85649f948c-rhmfs\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:51 crc kubenswrapper[4906]: I0227 08:53:51.570595 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:52 crc kubenswrapper[4906]: I0227 08:53:52.159400 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85649f948c-rhmfs"] Feb 27 08:53:52 crc kubenswrapper[4906]: W0227 08:53:52.166703 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod77e1c833_7b25_4d3d_bd7a_d24f619d4966.slice/crio-fc52f0af8659db6b46695fb31f4744f07e5d64aafd97471aeac65142ebdba4e0 WatchSource:0}: Error finding container fc52f0af8659db6b46695fb31f4744f07e5d64aafd97471aeac65142ebdba4e0: Status 404 returned error can't find the container with id fc52f0af8659db6b46695fb31f4744f07e5d64aafd97471aeac65142ebdba4e0 Feb 27 08:53:53 crc kubenswrapper[4906]: I0227 08:53:53.026043 4906 generic.go:334] "Generic (PLEG): container finished" podID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerID="8fa831683e89f27f7d0c5c2f71ad0f2020ae19b014e010d61ca4c141750d686b" exitCode=0 Feb 27 08:53:53 crc kubenswrapper[4906]: I0227 08:53:53.026130 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" event={"ID":"77e1c833-7b25-4d3d-bd7a-d24f619d4966","Type":"ContainerDied","Data":"8fa831683e89f27f7d0c5c2f71ad0f2020ae19b014e010d61ca4c141750d686b"} Feb 27 08:53:53 crc kubenswrapper[4906]: I0227 08:53:53.026956 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" event={"ID":"77e1c833-7b25-4d3d-bd7a-d24f619d4966","Type":"ContainerStarted","Data":"fc52f0af8659db6b46695fb31f4744f07e5d64aafd97471aeac65142ebdba4e0"} Feb 27 08:53:53 crc kubenswrapper[4906]: I0227 08:53:53.669501 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:53 crc kubenswrapper[4906]: I0227 08:53:53.761292 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.040687 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-log" containerID="cri-o://1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a" gracePeriod=30 Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.040815 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" event={"ID":"77e1c833-7b25-4d3d-bd7a-d24f619d4966","Type":"ContainerStarted","Data":"ccb5f712a19a1f8e866e4a777195e5252c2425b1dcb6dc5b7a9ae3044da1e04e"} Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.041300 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.040949 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-api" containerID="cri-o://cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639" gracePeriod=30 Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.099964 4906 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" podStartSLOduration=3.099930331 podStartE2EDuration="3.099930331s" podCreationTimestamp="2026-02-27 08:53:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:53:54.085124962 +0000 UTC m=+1532.479526572" watchObservedRunningTime="2026-02-27 08:53:54.099930331 +0000 UTC m=+1532.494331941" Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.791371 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.792128 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-central-agent" containerID="cri-o://767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1" gracePeriod=30 Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.792268 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="sg-core" containerID="cri-o://b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8" gracePeriod=30 Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.792311 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-notification-agent" containerID="cri-o://927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9" gracePeriod=30 Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.792287 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="proxy-httpd" containerID="cri-o://bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8" gracePeriod=30 Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.845008 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:53:54 crc kubenswrapper[4906]: I0227 08:53:54.845146 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.053254 4906 generic.go:334] "Generic (PLEG): container finished" podID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerID="1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a" exitCode=143 Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.053298 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292","Type":"ContainerDied","Data":"1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a"} Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.056601 4906 generic.go:334] "Generic (PLEG): container finished" podID="19bc2263-4fae-48d4-b3f2-9da97ecec904" 
containerID="bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8" exitCode=0 Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.056646 4906 generic.go:334] "Generic (PLEG): container finished" podID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerID="b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8" exitCode=2 Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.056694 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerDied","Data":"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8"} Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.056764 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerDied","Data":"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8"} Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.888196 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.949516 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-log-httpd\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.949641 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-combined-ca-bundle\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.949715 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-run-httpd\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.949762 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-scripts\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.949824 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-sg-core-conf-yaml\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.949873 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-config-data\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.949940 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kc8q\" (UniqueName: \"kubernetes.io/projected/19bc2263-4fae-48d4-b3f2-9da97ecec904-kube-api-access-2kc8q\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: 
\"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.950035 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-ceilometer-tls-certs\") pod \"19bc2263-4fae-48d4-b3f2-9da97ecec904\" (UID: \"19bc2263-4fae-48d4-b3f2-9da97ecec904\") " Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.951041 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.951068 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.965470 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-scripts" (OuterVolumeSpecName: "scripts") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.967081 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19bc2263-4fae-48d4-b3f2-9da97ecec904-kube-api-access-2kc8q" (OuterVolumeSpecName: "kube-api-access-2kc8q") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "kube-api-access-2kc8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:55 crc kubenswrapper[4906]: I0227 08:53:55.991203 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.027042 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.047597 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.052358 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.052389 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.052400 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.052411 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kc8q\" (UniqueName: \"kubernetes.io/projected/19bc2263-4fae-48d4-b3f2-9da97ecec904-kube-api-access-2kc8q\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.052424 4906 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.052434 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/19bc2263-4fae-48d4-b3f2-9da97ecec904-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.052444 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.070415 4906 generic.go:334] "Generic (PLEG): container finished" podID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerID="927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9" exitCode=0 Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.070461 4906 generic.go:334] "Generic (PLEG): container finished" podID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerID="767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1" exitCode=0 Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.070475 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerDied","Data":"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9"} Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.070564 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerDied","Data":"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1"} Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.070575 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"19bc2263-4fae-48d4-b3f2-9da97ecec904","Type":"ContainerDied","Data":"50b063e61ea80f533550a06b42c606a7b17ce853f99558c0cfe2500d898cab24"} Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.070595 4906 scope.go:117] "RemoveContainer" containerID="bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 
08:53:56.070610 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.088647 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-config-data" (OuterVolumeSpecName: "config-data") pod "19bc2263-4fae-48d4-b3f2-9da97ecec904" (UID: "19bc2263-4fae-48d4-b3f2-9da97ecec904"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.100590 4906 scope.go:117] "RemoveContainer" containerID="b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.125091 4906 scope.go:117] "RemoveContainer" containerID="927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.149075 4906 scope.go:117] "RemoveContainer" containerID="767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.154459 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/19bc2263-4fae-48d4-b3f2-9da97ecec904-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.174575 4906 scope.go:117] "RemoveContainer" containerID="bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.176040 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8\": container with ID starting with bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8 not found: ID does not exist" containerID="bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.176095 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8"} err="failed to get container status \"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8\": rpc error: code = NotFound desc = could not find container \"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8\": container with ID starting with bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8 not found: ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.176127 4906 scope.go:117] "RemoveContainer" containerID="b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.176543 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8\": container with ID starting with b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8 not found: ID does not exist" containerID="b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.176572 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8"} err="failed to get container status 
\"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8\": rpc error: code = NotFound desc = could not find container \"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8\": container with ID starting with b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8 not found: ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.176589 4906 scope.go:117] "RemoveContainer" containerID="927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.176902 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9\": container with ID starting with 927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9 not found: ID does not exist" containerID="927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.176938 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9"} err="failed to get container status \"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9\": rpc error: code = NotFound desc = could not find container \"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9\": container with ID starting with 927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9 not found: ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.176967 4906 scope.go:117] "RemoveContainer" containerID="767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.177243 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1\": container with ID starting with 767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1 not found: ID does not exist" containerID="767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.177271 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1"} err="failed to get container status \"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1\": rpc error: code = NotFound desc = could not find container \"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1\": container with ID starting with 767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1 not found: ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.177288 4906 scope.go:117] "RemoveContainer" containerID="bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.177580 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8"} err="failed to get container status \"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8\": rpc error: code = NotFound desc = could not find container \"bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8\": container with ID starting with bf58f92e1705c5bcb31cb0f054b889ce863b23e7daa44ed13a90d4525878eeb8 not found: 
ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.177606 4906 scope.go:117] "RemoveContainer" containerID="b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.178038 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8"} err="failed to get container status \"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8\": rpc error: code = NotFound desc = could not find container \"b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8\": container with ID starting with b408342ffb13f9b4ae1e5a4d094e6ab66fdf552733a281843b890475d57422d8 not found: ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.178060 4906 scope.go:117] "RemoveContainer" containerID="927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.178491 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9"} err="failed to get container status \"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9\": rpc error: code = NotFound desc = could not find container \"927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9\": container with ID starting with 927e6f2c3c74fb3e52e37ddb7b3c764bf2550fcb465d2be4491cd3a32e857ca9 not found: ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.178514 4906 scope.go:117] "RemoveContainer" containerID="767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.178839 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1"} err="failed to get container status \"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1\": rpc error: code = NotFound desc = could not find container \"767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1\": container with ID starting with 767db5d4f6a13cd8c6fda3f8b9f1b9c713db43ed0d24ecf006b3570710b710a1 not found: ID does not exist" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.408343 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.418128 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.432921 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.433479 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-central-agent" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.433503 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-central-agent" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.433524 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="proxy-httpd" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.433531 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" 
containerName="proxy-httpd" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.433541 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="sg-core" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.433549 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="sg-core" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.433558 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-notification-agent" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.433568 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-notification-agent" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.434780 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="sg-core" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.434807 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="proxy-httpd" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.434834 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-central-agent" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.434845 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" containerName="ceilometer-notification-agent" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.454063 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.454218 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.497730 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.497974 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.497997 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.568478 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19bc2263-4fae-48d4-b3f2-9da97ecec904" path="/var/lib/kubelet/pods/19bc2263-4fae-48d4-b3f2-9da97ecec904/volumes" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.598870 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.599795 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.600037 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9mh2\" (UniqueName: \"kubernetes.io/projected/7eb0f259-6550-4850-a706-f511187d035d-kube-api-access-v9mh2\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.600123 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-config-data\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.600276 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-run-httpd\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.600311 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.600444 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-scripts\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.600634 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-log-httpd\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.702266 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.702749 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.702845 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.702914 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9mh2\" (UniqueName: \"kubernetes.io/projected/7eb0f259-6550-4850-a706-f511187d035d-kube-api-access-v9mh2\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.702949 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-config-data\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.703080 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-run-httpd\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.703117 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.703168 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-scripts\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.703318 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-log-httpd\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: E0227 08:53:56.703367 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[ceilometer-tls-certs combined-ca-bundle config-data kube-api-access-v9mh2 log-httpd run-httpd scripts sg-core-conf-yaml], unattached volumes=[], 
failed to process volumes=[]: context canceled" pod="openstack/ceilometer-0" podUID="7eb0f259-6550-4850-a706-f511187d035d" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.705151 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-log-httpd\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.707486 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-scripts\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.709027 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.710170 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-run-httpd\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.710789 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-config-data\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.711963 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.713674 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:56 crc kubenswrapper[4906]: I0227 08:53:56.729506 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9mh2\" (UniqueName: \"kubernetes.io/projected/7eb0f259-6550-4850-a706-f511187d035d-kube-api-access-v9mh2\") pod \"ceilometer-0\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " pod="openstack/ceilometer-0" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.081018 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.096345 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214155 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-run-httpd\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214278 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-ceilometer-tls-certs\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214312 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-log-httpd\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214495 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9mh2\" (UniqueName: \"kubernetes.io/projected/7eb0f259-6550-4850-a706-f511187d035d-kube-api-access-v9mh2\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214547 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-combined-ca-bundle\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214563 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214610 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-scripts\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214662 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-sg-core-conf-yaml\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214740 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-config-data\") pod \"7eb0f259-6550-4850-a706-f511187d035d\" (UID: \"7eb0f259-6550-4850-a706-f511187d035d\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.214806 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.215302 4906 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-run-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.215670 4906 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7eb0f259-6550-4850-a706-f511187d035d-log-httpd\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.220837 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.221219 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.221398 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7eb0f259-6550-4850-a706-f511187d035d-kube-api-access-v9mh2" (OuterVolumeSpecName: "kube-api-access-v9mh2") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "kube-api-access-v9mh2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.222163 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.222297 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-scripts" (OuterVolumeSpecName: "scripts") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.222807 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-config-data" (OuterVolumeSpecName: "config-data") pod "7eb0f259-6550-4850-a706-f511187d035d" (UID: "7eb0f259-6550-4850-a706-f511187d035d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.323551 4906 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.323605 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9mh2\" (UniqueName: \"kubernetes.io/projected/7eb0f259-6550-4850-a706-f511187d035d-kube-api-access-v9mh2\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.323621 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.323634 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.323648 4906 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.323660 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7eb0f259-6550-4850-a706-f511187d035d-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.700394 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.833869 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-combined-ca-bundle\") pod \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.833945 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jxvjm\" (UniqueName: \"kubernetes.io/projected/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-kube-api-access-jxvjm\") pod \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.834157 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-config-data\") pod \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.834197 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-logs\") pod \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\" (UID: \"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292\") " Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.835489 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-logs" (OuterVolumeSpecName: "logs") pod "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" (UID: "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.840813 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-kube-api-access-jxvjm" (OuterVolumeSpecName: "kube-api-access-jxvjm") pod "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" (UID: "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292"). InnerVolumeSpecName "kube-api-access-jxvjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.844292 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jxvjm\" (UniqueName: \"kubernetes.io/projected/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-kube-api-access-jxvjm\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.844321 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.881128 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" (UID: "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.891151 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-config-data" (OuterVolumeSpecName: "config-data") pod "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" (UID: "d4bebfbe-166b-40f6-9b9e-f0fcc20e6292"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.945790 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:57 crc kubenswrapper[4906]: I0227 08:53:57.945824 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.092405 4906 generic.go:334] "Generic (PLEG): container finished" podID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerID="cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639" exitCode=0 Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.092464 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.093007 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.092517 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292","Type":"ContainerDied","Data":"cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639"} Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.093136 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4bebfbe-166b-40f6-9b9e-f0fcc20e6292","Type":"ContainerDied","Data":"7286ed2578c7e55e85c293f0e0ba876029696aea6f368d7c43ae85e4243da7ef"} Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.093176 4906 scope.go:117] "RemoveContainer" containerID="cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.128862 4906 scope.go:117] "RemoveContainer" containerID="1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.166131 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.182661 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.185103 4906 scope.go:117] "RemoveContainer" containerID="cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639" Feb 27 08:53:58 crc kubenswrapper[4906]: E0227 08:53:58.191356 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639\": container with ID starting with cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639 not found: ID does not exist" containerID="cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 
08:53:58.191414 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639"} err="failed to get container status \"cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639\": rpc error: code = NotFound desc = could not find container \"cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639\": container with ID starting with cfcca24aa51c77dcd3566e35f01ad6c1e1cccf76e8fcb2234aca39befd64b639 not found: ID does not exist" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.191447 4906 scope.go:117] "RemoveContainer" containerID="1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a" Feb 27 08:53:58 crc kubenswrapper[4906]: E0227 08:53:58.201624 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a\": container with ID starting with 1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a not found: ID does not exist" containerID="1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.201686 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a"} err="failed to get container status \"1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a\": rpc error: code = NotFound desc = could not find container \"1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a\": container with ID starting with 1a3580e83896166cc75f475eb68df842375f4c489966a978ea7fbb6ed676e61a not found: ID does not exist" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.206843 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.226403 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.238627 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: E0227 08:53:58.239458 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-api" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.239584 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-api" Feb 27 08:53:58 crc kubenswrapper[4906]: E0227 08:53:58.239642 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-log" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.239688 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-log" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.240156 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-log" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.240239 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" containerName="nova-api-api" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.242465 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.249449 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.252559 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.257561 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.257991 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.258300 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.259022 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.259331 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.259505 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.263388 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.270386 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.352776 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m2t98\" (UniqueName: \"kubernetes.io/projected/fb48918b-c493-4d20-98ab-b5c3d22d8a46-kube-api-access-m2t98\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.353530 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-public-tls-certs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.353631 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-config-data\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.353725 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/385c1ece-0f96-4433-b8a4-8719a56f5697-run-httpd\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.353791 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/385c1ece-0f96-4433-b8a4-8719a56f5697-log-httpd\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc 
kubenswrapper[4906]: I0227 08:53:58.353948 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354102 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354234 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-internal-tls-certs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354361 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6sfcp\" (UniqueName: \"kubernetes.io/projected/385c1ece-0f96-4433-b8a4-8719a56f5697-kube-api-access-6sfcp\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354502 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354622 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb48918b-c493-4d20-98ab-b5c3d22d8a46-logs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354697 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354791 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-scripts\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.354902 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-config-data\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457365 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457434 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457483 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-internal-tls-certs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457517 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6sfcp\" (UniqueName: \"kubernetes.io/projected/385c1ece-0f96-4433-b8a4-8719a56f5697-kube-api-access-6sfcp\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457561 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457596 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb48918b-c493-4d20-98ab-b5c3d22d8a46-logs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457613 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457634 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-scripts\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457663 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-config-data\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457692 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m2t98\" (UniqueName: \"kubernetes.io/projected/fb48918b-c493-4d20-98ab-b5c3d22d8a46-kube-api-access-m2t98\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457716 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-public-tls-certs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457739 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-config-data\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457763 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/385c1ece-0f96-4433-b8a4-8719a56f5697-run-httpd\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.457778 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/385c1ece-0f96-4433-b8a4-8719a56f5697-log-httpd\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.460480 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/385c1ece-0f96-4433-b8a4-8719a56f5697-run-httpd\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.463633 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb48918b-c493-4d20-98ab-b5c3d22d8a46-logs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.465064 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-internal-tls-certs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.465180 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.465502 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/385c1ece-0f96-4433-b8a4-8719a56f5697-log-httpd\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.465718 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.466539 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-scripts\") pod \"ceilometer-0\" (UID: 
\"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.468820 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-config-data\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.475996 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.478666 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m2t98\" (UniqueName: \"kubernetes.io/projected/fb48918b-c493-4d20-98ab-b5c3d22d8a46-kube-api-access-m2t98\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.479396 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-config-data\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.479485 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/385c1ece-0f96-4433-b8a4-8719a56f5697-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.485514 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-public-tls-certs\") pod \"nova-api-0\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.487057 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6sfcp\" (UniqueName: \"kubernetes.io/projected/385c1ece-0f96-4433-b8a4-8719a56f5697-kube-api-access-6sfcp\") pod \"ceilometer-0\" (UID: \"385c1ece-0f96-4433-b8a4-8719a56f5697\") " pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.564275 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7eb0f259-6550-4850-a706-f511187d035d" path="/var/lib/kubelet/pods/7eb0f259-6550-4850-a706-f511187d035d/volumes" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.564812 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4bebfbe-166b-40f6-9b9e-f0fcc20e6292" path="/var/lib/kubelet/pods/d4bebfbe-166b-40f6-9b9e-f0fcc20e6292/volumes" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.572232 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.585585 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.761339 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:58 crc kubenswrapper[4906]: I0227 08:53:58.793573 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.110611 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.121362 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:53:59 crc kubenswrapper[4906]: W0227 08:53:59.130452 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfb48918b_c493_4d20_98ab_b5c3d22d8a46.slice/crio-0d35784fc9d2f539685653730c2b52e56fb4b7525e985369011a215647bc2680 WatchSource:0}: Error finding container 0d35784fc9d2f539685653730c2b52e56fb4b7525e985369011a215647bc2680: Status 404 returned error can't find the container with id 0d35784fc9d2f539685653730c2b52e56fb4b7525e985369011a215647bc2680 Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.136254 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.335216 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-v4w5x"] Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.336633 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.339990 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.340312 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.349303 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-v4w5x"] Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.380828 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-scripts\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.381107 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k9wd9\" (UniqueName: \"kubernetes.io/projected/14371a49-0d1f-4596-9d93-5470dfbdb6c4-kube-api-access-k9wd9\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.381537 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-config-data\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 
08:53:59.381847 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.484001 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.484103 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-scripts\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.484153 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k9wd9\" (UniqueName: \"kubernetes.io/projected/14371a49-0d1f-4596-9d93-5470dfbdb6c4-kube-api-access-k9wd9\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.484230 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-config-data\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.491671 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-scripts\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.498591 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.498624 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-config-data\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.501788 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k9wd9\" (UniqueName: \"kubernetes.io/projected/14371a49-0d1f-4596-9d93-5470dfbdb6c4-kube-api-access-k9wd9\") pod \"nova-cell1-cell-mapping-v4w5x\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:53:59 crc kubenswrapper[4906]: I0227 08:53:59.656466 4906 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.135342 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fb48918b-c493-4d20-98ab-b5c3d22d8a46","Type":"ContainerStarted","Data":"f6725a49bf018bf98ed8d297ac8271bc6afb28c83c400958ddd02e6981b6c325"} Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.137321 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fb48918b-c493-4d20-98ab-b5c3d22d8a46","Type":"ContainerStarted","Data":"09f9346b1b75ab35e11f3585404d8ae7b1f8f9a474228409b5dd02e0517786e7"} Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.137396 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fb48918b-c493-4d20-98ab-b5c3d22d8a46","Type":"ContainerStarted","Data":"0d35784fc9d2f539685653730c2b52e56fb4b7525e985369011a215647bc2680"} Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.138287 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"385c1ece-0f96-4433-b8a4-8719a56f5697","Type":"ContainerStarted","Data":"3c93616a87073c3447b85603bd9fdcc124834cd367f93cc947a2529a1a177bf1"} Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.138353 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"385c1ece-0f96-4433-b8a4-8719a56f5697","Type":"ContainerStarted","Data":"f4ae449b8b15e153dc6d259149b31e5f6176abbfb83fb0461246cfdc89561265"} Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.150151 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536374-vk6qp"] Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.151525 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536374-vk6qp" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.156789 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.157007 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.157156 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.165145 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536374-vk6qp"] Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.173717 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.173686457 podStartE2EDuration="2.173686457s" podCreationTimestamp="2026-02-27 08:53:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:54:00.158277612 +0000 UTC m=+1538.552679222" watchObservedRunningTime="2026-02-27 08:54:00.173686457 +0000 UTC m=+1538.568088067" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.199140 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8gxz\" (UniqueName: \"kubernetes.io/projected/0d28859e-464e-4266-89aa-2318bc051b40-kube-api-access-h8gxz\") pod \"auto-csr-approver-29536374-vk6qp\" (UID: \"0d28859e-464e-4266-89aa-2318bc051b40\") " pod="openshift-infra/auto-csr-approver-29536374-vk6qp" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.231143 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-v4w5x"] Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.301045 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8gxz\" (UniqueName: \"kubernetes.io/projected/0d28859e-464e-4266-89aa-2318bc051b40-kube-api-access-h8gxz\") pod \"auto-csr-approver-29536374-vk6qp\" (UID: \"0d28859e-464e-4266-89aa-2318bc051b40\") " pod="openshift-infra/auto-csr-approver-29536374-vk6qp" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.327500 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8gxz\" (UniqueName: \"kubernetes.io/projected/0d28859e-464e-4266-89aa-2318bc051b40-kube-api-access-h8gxz\") pod \"auto-csr-approver-29536374-vk6qp\" (UID: \"0d28859e-464e-4266-89aa-2318bc051b40\") " pod="openshift-infra/auto-csr-approver-29536374-vk6qp" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.472662 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536374-vk6qp" Feb 27 08:54:00 crc kubenswrapper[4906]: I0227 08:54:00.986018 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536374-vk6qp"] Feb 27 08:54:00 crc kubenswrapper[4906]: W0227 08:54:00.996560 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0d28859e_464e_4266_89aa_2318bc051b40.slice/crio-a2d95bf8200174fabed800def195dc958a85e79ce1e54489844ba97b9edeac8c WatchSource:0}: Error finding container a2d95bf8200174fabed800def195dc958a85e79ce1e54489844ba97b9edeac8c: Status 404 returned error can't find the container with id a2d95bf8200174fabed800def195dc958a85e79ce1e54489844ba97b9edeac8c Feb 27 08:54:01 crc kubenswrapper[4906]: I0227 08:54:01.150694 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v4w5x" event={"ID":"14371a49-0d1f-4596-9d93-5470dfbdb6c4","Type":"ContainerStarted","Data":"d18bb50f3f80b1cfd19397b9fb3700fecde5c046b31d589dbf17546c53ffc5a9"} Feb 27 08:54:01 crc kubenswrapper[4906]: I0227 08:54:01.151221 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v4w5x" event={"ID":"14371a49-0d1f-4596-9d93-5470dfbdb6c4","Type":"ContainerStarted","Data":"d881ec3b6d009ceb17871deabaf103bbdc4c7682df295396650d62bfaa93e17f"} Feb 27 08:54:01 crc kubenswrapper[4906]: I0227 08:54:01.155698 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536374-vk6qp" event={"ID":"0d28859e-464e-4266-89aa-2318bc051b40","Type":"ContainerStarted","Data":"a2d95bf8200174fabed800def195dc958a85e79ce1e54489844ba97b9edeac8c"} Feb 27 08:54:01 crc kubenswrapper[4906]: I0227 08:54:01.174439 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-v4w5x" podStartSLOduration=2.174418808 podStartE2EDuration="2.174418808s" podCreationTimestamp="2026-02-27 08:53:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:54:01.169998233 +0000 UTC m=+1539.564399853" watchObservedRunningTime="2026-02-27 08:54:01.174418808 +0000 UTC m=+1539.568820418" Feb 27 08:54:01 crc kubenswrapper[4906]: I0227 08:54:01.573165 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:54:01 crc kubenswrapper[4906]: I0227 08:54:01.705162 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699d7ddff-r4mqm"] Feb 27 08:54:01 crc kubenswrapper[4906]: I0227 08:54:01.705444 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" containerName="dnsmasq-dns" containerID="cri-o://f1d3a957a25ff1140f21d57e8608430db4a9d06f35e3a7f83092b423b204a19d" gracePeriod=10 Feb 27 08:54:02 crc kubenswrapper[4906]: I0227 08:54:02.030206 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.196:5353: connect: connection refused" Feb 27 08:54:02 crc kubenswrapper[4906]: I0227 08:54:02.170042 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"385c1ece-0f96-4433-b8a4-8719a56f5697","Type":"ContainerStarted","Data":"31fb902a29e8b27aeb0c231cbd97b9500f671e04e41befbf3bf21d803cad2a86"} Feb 27 08:54:02 crc kubenswrapper[4906]: I0227 08:54:02.180130 4906 generic.go:334] "Generic (PLEG): container finished" podID="b09dc78d-c1b1-4370-938c-49fc82324733" containerID="f1d3a957a25ff1140f21d57e8608430db4a9d06f35e3a7f83092b423b204a19d" exitCode=0 Feb 27 08:54:02 crc kubenswrapper[4906]: I0227 08:54:02.180240 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" event={"ID":"b09dc78d-c1b1-4370-938c-49fc82324733","Type":"ContainerDied","Data":"f1d3a957a25ff1140f21d57e8608430db4a9d06f35e3a7f83092b423b204a19d"} Feb 27 08:54:02 crc kubenswrapper[4906]: I0227 08:54:02.926497 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.069991 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-swift-storage-0\") pod \"b09dc78d-c1b1-4370-938c-49fc82324733\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.070539 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-sb\") pod \"b09dc78d-c1b1-4370-938c-49fc82324733\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.070683 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-nb\") pod \"b09dc78d-c1b1-4370-938c-49fc82324733\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.070770 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-config\") pod \"b09dc78d-c1b1-4370-938c-49fc82324733\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.070833 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn86p\" (UniqueName: \"kubernetes.io/projected/b09dc78d-c1b1-4370-938c-49fc82324733-kube-api-access-jn86p\") pod \"b09dc78d-c1b1-4370-938c-49fc82324733\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.070906 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-svc\") pod \"b09dc78d-c1b1-4370-938c-49fc82324733\" (UID: \"b09dc78d-c1b1-4370-938c-49fc82324733\") " Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.077977 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b09dc78d-c1b1-4370-938c-49fc82324733-kube-api-access-jn86p" (OuterVolumeSpecName: "kube-api-access-jn86p") pod "b09dc78d-c1b1-4370-938c-49fc82324733" (UID: "b09dc78d-c1b1-4370-938c-49fc82324733"). InnerVolumeSpecName "kube-api-access-jn86p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.140217 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b09dc78d-c1b1-4370-938c-49fc82324733" (UID: "b09dc78d-c1b1-4370-938c-49fc82324733"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.151552 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-config" (OuterVolumeSpecName: "config") pod "b09dc78d-c1b1-4370-938c-49fc82324733" (UID: "b09dc78d-c1b1-4370-938c-49fc82324733"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.151739 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b09dc78d-c1b1-4370-938c-49fc82324733" (UID: "b09dc78d-c1b1-4370-938c-49fc82324733"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.164380 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b09dc78d-c1b1-4370-938c-49fc82324733" (UID: "b09dc78d-c1b1-4370-938c-49fc82324733"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.173200 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b09dc78d-c1b1-4370-938c-49fc82324733" (UID: "b09dc78d-c1b1-4370-938c-49fc82324733"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.173978 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.174013 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn86p\" (UniqueName: \"kubernetes.io/projected/b09dc78d-c1b1-4370-938c-49fc82324733-kube-api-access-jn86p\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.174028 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.174036 4906 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.174045 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.174054 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b09dc78d-c1b1-4370-938c-49fc82324733-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.202367 4906 generic.go:334] "Generic (PLEG): container finished" podID="0d28859e-464e-4266-89aa-2318bc051b40" containerID="ec1256f38f1b6bf03ad42d99f256f3bc755667f67925ff9b325a4878eba95298" exitCode=0 Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.202569 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536374-vk6qp" event={"ID":"0d28859e-464e-4266-89aa-2318bc051b40","Type":"ContainerDied","Data":"ec1256f38f1b6bf03ad42d99f256f3bc755667f67925ff9b325a4878eba95298"} Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.205503 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" event={"ID":"b09dc78d-c1b1-4370-938c-49fc82324733","Type":"ContainerDied","Data":"16c281be24153d24a454a4a6cb15d8009d12dece28cc224b78b74653d9d4d8d0"} Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.205560 4906 scope.go:117] "RemoveContainer" containerID="f1d3a957a25ff1140f21d57e8608430db4a9d06f35e3a7f83092b423b204a19d" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.205695 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-699d7ddff-r4mqm" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.214971 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"385c1ece-0f96-4433-b8a4-8719a56f5697","Type":"ContainerStarted","Data":"c89243b129c8b7b3f971643787e60b4ba464ad77dc040dc30cbfb14e4437f9ad"} Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.247230 4906 scope.go:117] "RemoveContainer" containerID="7b9dbc7d7615ae7aeffedbaa8a65cbdc327ea82d656cb71aae1751864d1a9bd2" Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.309410 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-699d7ddff-r4mqm"] Feb 27 08:54:03 crc kubenswrapper[4906]: I0227 08:54:03.319729 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-699d7ddff-r4mqm"] Feb 27 08:54:04 crc kubenswrapper[4906]: I0227 08:54:04.567518 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" path="/var/lib/kubelet/pods/b09dc78d-c1b1-4370-938c-49fc82324733/volumes" Feb 27 08:54:04 crc kubenswrapper[4906]: I0227 08:54:04.716995 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536374-vk6qp" Feb 27 08:54:04 crc kubenswrapper[4906]: I0227 08:54:04.809335 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8gxz\" (UniqueName: \"kubernetes.io/projected/0d28859e-464e-4266-89aa-2318bc051b40-kube-api-access-h8gxz\") pod \"0d28859e-464e-4266-89aa-2318bc051b40\" (UID: \"0d28859e-464e-4266-89aa-2318bc051b40\") " Feb 27 08:54:04 crc kubenswrapper[4906]: I0227 08:54:04.818308 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d28859e-464e-4266-89aa-2318bc051b40-kube-api-access-h8gxz" (OuterVolumeSpecName: "kube-api-access-h8gxz") pod "0d28859e-464e-4266-89aa-2318bc051b40" (UID: "0d28859e-464e-4266-89aa-2318bc051b40"). InnerVolumeSpecName "kube-api-access-h8gxz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:04 crc kubenswrapper[4906]: I0227 08:54:04.912316 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8gxz\" (UniqueName: \"kubernetes.io/projected/0d28859e-464e-4266-89aa-2318bc051b40-kube-api-access-h8gxz\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.243267 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"385c1ece-0f96-4433-b8a4-8719a56f5697","Type":"ContainerStarted","Data":"09a878c7cbd1034d7224fce48a359609488006649d7d618866c2e6a5e511383d"} Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.245452 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.247039 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536374-vk6qp" event={"ID":"0d28859e-464e-4266-89aa-2318bc051b40","Type":"ContainerDied","Data":"a2d95bf8200174fabed800def195dc958a85e79ce1e54489844ba97b9edeac8c"} Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.247089 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2d95bf8200174fabed800def195dc958a85e79ce1e54489844ba97b9edeac8c" Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.247232 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536374-vk6qp" Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.288396 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.289261681 podStartE2EDuration="7.288369654s" podCreationTimestamp="2026-02-27 08:53:58 +0000 UTC" firstStartedPulling="2026-02-27 08:53:59.130589691 +0000 UTC m=+1537.524991301" lastFinishedPulling="2026-02-27 08:54:04.129697664 +0000 UTC m=+1542.524099274" observedRunningTime="2026-02-27 08:54:05.278622268 +0000 UTC m=+1543.673023878" watchObservedRunningTime="2026-02-27 08:54:05.288369654 +0000 UTC m=+1543.682771284" Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.808014 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536368-tsnrz"] Feb 27 08:54:05 crc kubenswrapper[4906]: I0227 08:54:05.827213 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536368-tsnrz"] Feb 27 08:54:06 crc kubenswrapper[4906]: I0227 08:54:06.569028 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e6ebab9-7ce0-4b39-8f00-bb43625e784a" path="/var/lib/kubelet/pods/6e6ebab9-7ce0-4b39-8f00-bb43625e784a/volumes" Feb 27 08:54:07 crc kubenswrapper[4906]: I0227 08:54:07.269604 4906 generic.go:334] "Generic (PLEG): container finished" podID="14371a49-0d1f-4596-9d93-5470dfbdb6c4" containerID="d18bb50f3f80b1cfd19397b9fb3700fecde5c046b31d589dbf17546c53ffc5a9" exitCode=0 Feb 27 08:54:07 crc kubenswrapper[4906]: I0227 08:54:07.269711 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v4w5x" event={"ID":"14371a49-0d1f-4596-9d93-5470dfbdb6c4","Type":"ContainerDied","Data":"d18bb50f3f80b1cfd19397b9fb3700fecde5c046b31d589dbf17546c53ffc5a9"} Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.586421 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.586837 4906 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.724102 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.802843 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-combined-ca-bundle\") pod \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.803064 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-config-data\") pod \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.803235 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k9wd9\" (UniqueName: \"kubernetes.io/projected/14371a49-0d1f-4596-9d93-5470dfbdb6c4-kube-api-access-k9wd9\") pod \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.803479 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-scripts\") pod \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\" (UID: \"14371a49-0d1f-4596-9d93-5470dfbdb6c4\") " Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.812719 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14371a49-0d1f-4596-9d93-5470dfbdb6c4-kube-api-access-k9wd9" (OuterVolumeSpecName: "kube-api-access-k9wd9") pod "14371a49-0d1f-4596-9d93-5470dfbdb6c4" (UID: "14371a49-0d1f-4596-9d93-5470dfbdb6c4"). InnerVolumeSpecName "kube-api-access-k9wd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.821180 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-scripts" (OuterVolumeSpecName: "scripts") pod "14371a49-0d1f-4596-9d93-5470dfbdb6c4" (UID: "14371a49-0d1f-4596-9d93-5470dfbdb6c4"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.838213 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "14371a49-0d1f-4596-9d93-5470dfbdb6c4" (UID: "14371a49-0d1f-4596-9d93-5470dfbdb6c4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.842018 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-config-data" (OuterVolumeSpecName: "config-data") pod "14371a49-0d1f-4596-9d93-5470dfbdb6c4" (UID: "14371a49-0d1f-4596-9d93-5470dfbdb6c4"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.906146 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.906205 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k9wd9\" (UniqueName: \"kubernetes.io/projected/14371a49-0d1f-4596-9d93-5470dfbdb6c4-kube-api-access-k9wd9\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.906221 4906 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-scripts\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:08 crc kubenswrapper[4906]: I0227 08:54:08.906234 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/14371a49-0d1f-4596-9d93-5470dfbdb6c4-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.297745 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-v4w5x" event={"ID":"14371a49-0d1f-4596-9d93-5470dfbdb6c4","Type":"ContainerDied","Data":"d881ec3b6d009ceb17871deabaf103bbdc4c7682df295396650d62bfaa93e17f"} Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.297800 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d881ec3b6d009ceb17871deabaf103bbdc4c7682df295396650d62bfaa93e17f" Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.297830 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-v4w5x" Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.514315 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.514643 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d3491540-7e85-4eca-b7f2-75177b711909" containerName="nova-scheduler-scheduler" containerID="cri-o://4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9" gracePeriod=30 Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.540233 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.540615 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-log" containerID="cri-o://5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7" gracePeriod=30 Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.542651 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-metadata" containerID="cri-o://e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4" gracePeriod=30 Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.556382 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.556690 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-log" containerID="cri-o://09f9346b1b75ab35e11f3585404d8ae7b1f8f9a474228409b5dd02e0517786e7" gracePeriod=30 Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.557164 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-api" containerID="cri-o://f6725a49bf018bf98ed8d297ac8271bc6afb28c83c400958ddd02e6981b6c325" gracePeriod=30 Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.592618 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 27 08:54:09 crc kubenswrapper[4906]: I0227 08:54:09.592656 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.209:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 27 08:54:10 crc kubenswrapper[4906]: I0227 08:54:10.312764 4906 generic.go:334] "Generic (PLEG): container finished" podID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerID="09f9346b1b75ab35e11f3585404d8ae7b1f8f9a474228409b5dd02e0517786e7" exitCode=143 Feb 27 08:54:10 crc kubenswrapper[4906]: I0227 08:54:10.312895 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fb48918b-c493-4d20-98ab-b5c3d22d8a46","Type":"ContainerDied","Data":"09f9346b1b75ab35e11f3585404d8ae7b1f8f9a474228409b5dd02e0517786e7"} Feb 27 08:54:10 crc kubenswrapper[4906]: I0227 08:54:10.316945 4906 generic.go:334] "Generic (PLEG): container finished" podID="3d883848-7012-4c67-aad8-02d879f33ae5" containerID="5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7" exitCode=143 Feb 27 08:54:10 crc kubenswrapper[4906]: I0227 08:54:10.317051 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3d883848-7012-4c67-aad8-02d879f33ae5","Type":"ContainerDied","Data":"5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7"} Feb 27 08:54:12 crc kubenswrapper[4906]: I0227 08:54:12.693469 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": read tcp 10.217.0.2:39156->10.217.0.199:8775: read: connection reset by peer" Feb 27 08:54:12 crc kubenswrapper[4906]: I0227 08:54:12.693465 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.199:8775/\": read tcp 10.217.0.2:39170->10.217.0.199:8775: read: connection reset by peer" Feb 27 08:54:12 crc kubenswrapper[4906]: I0227 08:54:12.858575 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.007958 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k7hp\" (UniqueName: \"kubernetes.io/projected/d3491540-7e85-4eca-b7f2-75177b711909-kube-api-access-8k7hp\") pod \"d3491540-7e85-4eca-b7f2-75177b711909\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.008183 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-combined-ca-bundle\") pod \"d3491540-7e85-4eca-b7f2-75177b711909\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.008418 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-config-data\") pod \"d3491540-7e85-4eca-b7f2-75177b711909\" (UID: \"d3491540-7e85-4eca-b7f2-75177b711909\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.016125 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3491540-7e85-4eca-b7f2-75177b711909-kube-api-access-8k7hp" (OuterVolumeSpecName: "kube-api-access-8k7hp") pod "d3491540-7e85-4eca-b7f2-75177b711909" (UID: "d3491540-7e85-4eca-b7f2-75177b711909"). InnerVolumeSpecName "kube-api-access-8k7hp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.054030 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d3491540-7e85-4eca-b7f2-75177b711909" (UID: "d3491540-7e85-4eca-b7f2-75177b711909"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.070623 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-config-data" (OuterVolumeSpecName: "config-data") pod "d3491540-7e85-4eca-b7f2-75177b711909" (UID: "d3491540-7e85-4eca-b7f2-75177b711909"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.111836 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k7hp\" (UniqueName: \"kubernetes.io/projected/d3491540-7e85-4eca-b7f2-75177b711909-kube-api-access-8k7hp\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.111910 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.111924 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d3491540-7e85-4eca-b7f2-75177b711909-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.163965 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.315981 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-combined-ca-bundle\") pod \"3d883848-7012-4c67-aad8-02d879f33ae5\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.316137 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfsr6\" (UniqueName: \"kubernetes.io/projected/3d883848-7012-4c67-aad8-02d879f33ae5-kube-api-access-vfsr6\") pod \"3d883848-7012-4c67-aad8-02d879f33ae5\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.316238 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-nova-metadata-tls-certs\") pod \"3d883848-7012-4c67-aad8-02d879f33ae5\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.316299 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d883848-7012-4c67-aad8-02d879f33ae5-logs\") pod \"3d883848-7012-4c67-aad8-02d879f33ae5\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.316436 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-config-data\") pod \"3d883848-7012-4c67-aad8-02d879f33ae5\" (UID: \"3d883848-7012-4c67-aad8-02d879f33ae5\") " Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.317081 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d883848-7012-4c67-aad8-02d879f33ae5-logs" (OuterVolumeSpecName: "logs") pod "3d883848-7012-4c67-aad8-02d879f33ae5" (UID: "3d883848-7012-4c67-aad8-02d879f33ae5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.322111 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d883848-7012-4c67-aad8-02d879f33ae5-kube-api-access-vfsr6" (OuterVolumeSpecName: "kube-api-access-vfsr6") pod "3d883848-7012-4c67-aad8-02d879f33ae5" (UID: "3d883848-7012-4c67-aad8-02d879f33ae5"). InnerVolumeSpecName "kube-api-access-vfsr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.350190 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3d883848-7012-4c67-aad8-02d879f33ae5" (UID: "3d883848-7012-4c67-aad8-02d879f33ae5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.352846 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-config-data" (OuterVolumeSpecName: "config-data") pod "3d883848-7012-4c67-aad8-02d879f33ae5" (UID: "3d883848-7012-4c67-aad8-02d879f33ae5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.354330 4906 generic.go:334] "Generic (PLEG): container finished" podID="d3491540-7e85-4eca-b7f2-75177b711909" containerID="4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9" exitCode=0 Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.354406 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.354434 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d3491540-7e85-4eca-b7f2-75177b711909","Type":"ContainerDied","Data":"4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9"} Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.354527 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d3491540-7e85-4eca-b7f2-75177b711909","Type":"ContainerDied","Data":"66408c5f453811e5d3178a5a186640ad65afe1ce0913e8b14fb1b7e94baa64ea"} Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.354591 4906 scope.go:117] "RemoveContainer" containerID="4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.356758 4906 generic.go:334] "Generic (PLEG): container finished" podID="3d883848-7012-4c67-aad8-02d879f33ae5" containerID="e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4" exitCode=0 Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.356850 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3d883848-7012-4c67-aad8-02d879f33ae5","Type":"ContainerDied","Data":"e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4"} Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.356984 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.357000 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"3d883848-7012-4c67-aad8-02d879f33ae5","Type":"ContainerDied","Data":"4c9b02090932710bbff1cc3396ca96729e098f7cd2b614c911b6c57b2c3a9333"} Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.380893 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "3d883848-7012-4c67-aad8-02d879f33ae5" (UID: "3d883848-7012-4c67-aad8-02d879f33ae5"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.391427 4906 scope.go:117] "RemoveContainer" containerID="4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.392639 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9\": container with ID starting with 4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9 not found: ID does not exist" containerID="4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.392681 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9"} err="failed to get container status \"4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9\": rpc error: code = NotFound desc = could not find container \"4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9\": container with ID starting with 4c22a414612ccf4c95202b279acc131911598f279c5659aa3f516e3dcfbbbee9 not found: ID does not exist" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.392715 4906 scope.go:117] "RemoveContainer" containerID="e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.412604 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.418947 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfsr6\" (UniqueName: \"kubernetes.io/projected/3d883848-7012-4c67-aad8-02d879f33ae5-kube-api-access-vfsr6\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.418994 4906 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.419010 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3d883848-7012-4c67-aad8-02d879f33ae5-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.419025 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.419038 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3d883848-7012-4c67-aad8-02d879f33ae5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.424302 4906 scope.go:117] "RemoveContainer" containerID="5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.442406 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.454249 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.454911 4906 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" containerName="init" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.454941 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" containerName="init" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.454958 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d28859e-464e-4266-89aa-2318bc051b40" containerName="oc" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.454967 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d28859e-464e-4266-89aa-2318bc051b40" containerName="oc" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.455004 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-log" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455017 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-log" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.455030 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3491540-7e85-4eca-b7f2-75177b711909" containerName="nova-scheduler-scheduler" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455038 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3491540-7e85-4eca-b7f2-75177b711909" containerName="nova-scheduler-scheduler" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.455052 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-metadata" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455063 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-metadata" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.455084 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" containerName="dnsmasq-dns" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455093 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" containerName="dnsmasq-dns" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.455115 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14371a49-0d1f-4596-9d93-5470dfbdb6c4" containerName="nova-manage" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455125 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="14371a49-0d1f-4596-9d93-5470dfbdb6c4" containerName="nova-manage" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455423 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d28859e-464e-4266-89aa-2318bc051b40" containerName="oc" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455443 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-log" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455464 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="14371a49-0d1f-4596-9d93-5470dfbdb6c4" containerName="nova-manage" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455508 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3491540-7e85-4eca-b7f2-75177b711909" containerName="nova-scheduler-scheduler" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455525 4906 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" containerName="nova-metadata-metadata" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.455535 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="b09dc78d-c1b1-4370-938c-49fc82324733" containerName="dnsmasq-dns" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.456967 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.461410 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.464362 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.517175 4906 scope.go:117] "RemoveContainer" containerID="e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.518032 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4\": container with ID starting with e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4 not found: ID does not exist" containerID="e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.518243 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4"} err="failed to get container status \"e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4\": rpc error: code = NotFound desc = could not find container \"e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4\": container with ID starting with e0ae8c54a2b22aa5e543d5b04fd694e1f986afb499c37cb191e16137139a1cd4 not found: ID does not exist" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.518284 4906 scope.go:117] "RemoveContainer" containerID="5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7" Feb 27 08:54:13 crc kubenswrapper[4906]: E0227 08:54:13.519067 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7\": container with ID starting with 5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7 not found: ID does not exist" containerID="5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.519144 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7"} err="failed to get container status \"5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7\": rpc error: code = NotFound desc = could not find container \"5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7\": container with ID starting with 5021c021ac0124df8ecf9479358e6dca171987830f6da5d7e779eb70255367e7 not found: ID does not exist" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.520616 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjgv8\" (UniqueName: 
\"kubernetes.io/projected/3dfcdaf9-e14b-4fe9-a800-eb8998342762-kube-api-access-jjgv8\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.520806 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dfcdaf9-e14b-4fe9-a800-eb8998342762-config-data\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.520983 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfcdaf9-e14b-4fe9-a800-eb8998342762-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.622542 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjgv8\" (UniqueName: \"kubernetes.io/projected/3dfcdaf9-e14b-4fe9-a800-eb8998342762-kube-api-access-jjgv8\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.622637 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dfcdaf9-e14b-4fe9-a800-eb8998342762-config-data\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.622680 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfcdaf9-e14b-4fe9-a800-eb8998342762-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.627213 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3dfcdaf9-e14b-4fe9-a800-eb8998342762-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.631931 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3dfcdaf9-e14b-4fe9-a800-eb8998342762-config-data\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.642436 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjgv8\" (UniqueName: \"kubernetes.io/projected/3dfcdaf9-e14b-4fe9-a800-eb8998342762-kube-api-access-jjgv8\") pod \"nova-scheduler-0\" (UID: \"3dfcdaf9-e14b-4fe9-a800-eb8998342762\") " pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.781057 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.799701 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.808950 4906 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/nova-scheduler-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.814607 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.817806 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.825624 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.826189 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.827095 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.931040 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.931158 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztxhs\" (UniqueName: \"kubernetes.io/projected/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-kube-api-access-ztxhs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.931235 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-config-data\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.931281 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:13 crc kubenswrapper[4906]: I0227 08:54:13.931324 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-logs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.034268 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-config-data\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.034653 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc 
kubenswrapper[4906]: I0227 08:54:14.034694 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-logs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.034738 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.034803 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztxhs\" (UniqueName: \"kubernetes.io/projected/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-kube-api-access-ztxhs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.035395 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-logs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.040723 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.041653 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-config-data\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.043905 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.054450 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztxhs\" (UniqueName: \"kubernetes.io/projected/a8173ca1-28e6-49da-a3f4-4b8ebaf3e551-kube-api-access-ztxhs\") pod \"nova-metadata-0\" (UID: \"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551\") " pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.237855 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.318019 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Feb 27 08:54:14 crc kubenswrapper[4906]: W0227 08:54:14.324713 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3dfcdaf9_e14b_4fe9_a800_eb8998342762.slice/crio-32ea38c76debbe45cb5577940f3d025ee6916759b18d9a90942d9782b9cfb51d WatchSource:0}: Error finding container 32ea38c76debbe45cb5577940f3d025ee6916759b18d9a90942d9782b9cfb51d: Status 404 returned error can't find the container with id 32ea38c76debbe45cb5577940f3d025ee6916759b18d9a90942d9782b9cfb51d Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.373254 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3dfcdaf9-e14b-4fe9-a800-eb8998342762","Type":"ContainerStarted","Data":"32ea38c76debbe45cb5577940f3d025ee6916759b18d9a90942d9782b9cfb51d"} Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.573414 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d883848-7012-4c67-aad8-02d879f33ae5" path="/var/lib/kubelet/pods/3d883848-7012-4c67-aad8-02d879f33ae5/volumes" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.574539 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3491540-7e85-4eca-b7f2-75177b711909" path="/var/lib/kubelet/pods/d3491540-7e85-4eca-b7f2-75177b711909/volumes" Feb 27 08:54:14 crc kubenswrapper[4906]: I0227 08:54:14.755699 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Feb 27 08:54:14 crc kubenswrapper[4906]: W0227 08:54:14.756049 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8173ca1_28e6_49da_a3f4_4b8ebaf3e551.slice/crio-5f6f39d258ebaaadaedb27b01f6f387611f62cf8f5c06255f91f0bdc122c2037 WatchSource:0}: Error finding container 5f6f39d258ebaaadaedb27b01f6f387611f62cf8f5c06255f91f0bdc122c2037: Status 404 returned error can't find the container with id 5f6f39d258ebaaadaedb27b01f6f387611f62cf8f5c06255f91f0bdc122c2037 Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.389229 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"3dfcdaf9-e14b-4fe9-a800-eb8998342762","Type":"ContainerStarted","Data":"6bd3c188e2d13d3ad1a18b825580e9655b4aaff4baaa756ae14dd8a969066e86"} Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.393861 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551","Type":"ContainerStarted","Data":"6ff8106607a496464ac6652aab83fdae67f1ab507044fc64929b4225b041e718"} Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.393953 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551","Type":"ContainerStarted","Data":"fc328bac8b7cf0470f5e7a3c2140f7fe1d1b8ad6ec652b3908eef53b9800fac9"} Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.393967 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"a8173ca1-28e6-49da-a3f4-4b8ebaf3e551","Type":"ContainerStarted","Data":"5f6f39d258ebaaadaedb27b01f6f387611f62cf8f5c06255f91f0bdc122c2037"} Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.399219 4906 generic.go:334] "Generic (PLEG): 
container finished" podID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerID="f6725a49bf018bf98ed8d297ac8271bc6afb28c83c400958ddd02e6981b6c325" exitCode=0 Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.399319 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fb48918b-c493-4d20-98ab-b5c3d22d8a46","Type":"ContainerDied","Data":"f6725a49bf018bf98ed8d297ac8271bc6afb28c83c400958ddd02e6981b6c325"} Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.414762 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.414735004 podStartE2EDuration="2.414735004s" podCreationTimestamp="2026-02-27 08:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:54:15.40923637 +0000 UTC m=+1553.803637990" watchObservedRunningTime="2026-02-27 08:54:15.414735004 +0000 UTC m=+1553.809136614" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.444188 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.449387 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.449369024 podStartE2EDuration="2.449369024s" podCreationTimestamp="2026-02-27 08:54:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:54:15.43554056 +0000 UTC m=+1553.829942170" watchObservedRunningTime="2026-02-27 08:54:15.449369024 +0000 UTC m=+1553.843770634" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.575753 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-combined-ca-bundle\") pod \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.576375 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb48918b-c493-4d20-98ab-b5c3d22d8a46-logs\") pod \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.576447 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-config-data\") pod \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.576818 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-internal-tls-certs\") pod \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.576952 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-public-tls-certs\") pod \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.577235 4906 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb48918b-c493-4d20-98ab-b5c3d22d8a46-logs" (OuterVolumeSpecName: "logs") pod "fb48918b-c493-4d20-98ab-b5c3d22d8a46" (UID: "fb48918b-c493-4d20-98ab-b5c3d22d8a46"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.577447 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m2t98\" (UniqueName: \"kubernetes.io/projected/fb48918b-c493-4d20-98ab-b5c3d22d8a46-kube-api-access-m2t98\") pod \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\" (UID: \"fb48918b-c493-4d20-98ab-b5c3d22d8a46\") " Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.578030 4906 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fb48918b-c493-4d20-98ab-b5c3d22d8a46-logs\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.582241 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb48918b-c493-4d20-98ab-b5c3d22d8a46-kube-api-access-m2t98" (OuterVolumeSpecName: "kube-api-access-m2t98") pod "fb48918b-c493-4d20-98ab-b5c3d22d8a46" (UID: "fb48918b-c493-4d20-98ab-b5c3d22d8a46"). InnerVolumeSpecName "kube-api-access-m2t98". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.607993 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fb48918b-c493-4d20-98ab-b5c3d22d8a46" (UID: "fb48918b-c493-4d20-98ab-b5c3d22d8a46"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.610602 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-config-data" (OuterVolumeSpecName: "config-data") pod "fb48918b-c493-4d20-98ab-b5c3d22d8a46" (UID: "fb48918b-c493-4d20-98ab-b5c3d22d8a46"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.635708 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "fb48918b-c493-4d20-98ab-b5c3d22d8a46" (UID: "fb48918b-c493-4d20-98ab-b5c3d22d8a46"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.652109 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "fb48918b-c493-4d20-98ab-b5c3d22d8a46" (UID: "fb48918b-c493-4d20-98ab-b5c3d22d8a46"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.680594 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.680653 4906 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.680667 4906 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.680677 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m2t98\" (UniqueName: \"kubernetes.io/projected/fb48918b-c493-4d20-98ab-b5c3d22d8a46-kube-api-access-m2t98\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:15 crc kubenswrapper[4906]: I0227 08:54:15.680737 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb48918b-c493-4d20-98ab-b5c3d22d8a46-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.414827 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fb48918b-c493-4d20-98ab-b5c3d22d8a46","Type":"ContainerDied","Data":"0d35784fc9d2f539685653730c2b52e56fb4b7525e985369011a215647bc2680"} Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.414935 4906 scope.go:117] "RemoveContainer" containerID="f6725a49bf018bf98ed8d297ac8271bc6afb28c83c400958ddd02e6981b6c325" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.416553 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.457862 4906 scope.go:117] "RemoveContainer" containerID="09f9346b1b75ab35e11f3585404d8ae7b1f8f9a474228409b5dd02e0517786e7" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.469209 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.485301 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.502110 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Feb 27 08:54:16 crc kubenswrapper[4906]: E0227 08:54:16.502570 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-api" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.502604 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-api" Feb 27 08:54:16 crc kubenswrapper[4906]: E0227 08:54:16.502639 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-log" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.502650 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-log" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.502822 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-api" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.502848 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" containerName="nova-api-log" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.503991 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.509530 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.510490 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.510742 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.520774 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.565551 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb48918b-c493-4d20-98ab-b5c3d22d8a46" path="/var/lib/kubelet/pods/fb48918b-c493-4d20-98ab-b5c3d22d8a46/volumes" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.600529 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-logs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.600614 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.600656 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t44t8\" (UniqueName: \"kubernetes.io/projected/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-kube-api-access-t44t8\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.600690 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-config-data\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.600730 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-public-tls-certs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.600777 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.702639 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t44t8\" (UniqueName: \"kubernetes.io/projected/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-kube-api-access-t44t8\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 
08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.703148 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-config-data\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.703195 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-public-tls-certs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.703243 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.703301 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-logs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.703343 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.705084 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-logs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.708842 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.709520 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-public-tls-certs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.709948 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-config-data\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.712707 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-internal-tls-certs\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.722601 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-t44t8\" (UniqueName: \"kubernetes.io/projected/42ba6f7d-0f99-4b31-82bc-4366cec7c4a4-kube-api-access-t44t8\") pod \"nova-api-0\" (UID: \"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4\") " pod="openstack/nova-api-0" Feb 27 08:54:16 crc kubenswrapper[4906]: I0227 08:54:16.834489 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Feb 27 08:54:17 crc kubenswrapper[4906]: W0227 08:54:17.344707 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod42ba6f7d_0f99_4b31_82bc_4366cec7c4a4.slice/crio-ecdc6c7b86566697f378e553ee0bd434e97752459c62faf59c995778110bafc1 WatchSource:0}: Error finding container ecdc6c7b86566697f378e553ee0bd434e97752459c62faf59c995778110bafc1: Status 404 returned error can't find the container with id ecdc6c7b86566697f378e553ee0bd434e97752459c62faf59c995778110bafc1 Feb 27 08:54:17 crc kubenswrapper[4906]: I0227 08:54:17.348417 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Feb 27 08:54:17 crc kubenswrapper[4906]: I0227 08:54:17.428276 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4","Type":"ContainerStarted","Data":"ecdc6c7b86566697f378e553ee0bd434e97752459c62faf59c995778110bafc1"} Feb 27 08:54:18 crc kubenswrapper[4906]: I0227 08:54:18.439033 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4","Type":"ContainerStarted","Data":"5cae6e0f9a879060d61427b86bbc44b8e9becbd0bd80f3f5bea2ce294172cd59"} Feb 27 08:54:18 crc kubenswrapper[4906]: I0227 08:54:18.439953 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"42ba6f7d-0f99-4b31-82bc-4366cec7c4a4","Type":"ContainerStarted","Data":"6c7559ca5ed1cdb01851cdb88ebc6f7cb9227128060db590252fb2112df828e4"} Feb 27 08:54:18 crc kubenswrapper[4906]: I0227 08:54:18.472262 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.472212343 podStartE2EDuration="2.472212343s" podCreationTimestamp="2026-02-27 08:54:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:54:18.457567648 +0000 UTC m=+1556.851969278" watchObservedRunningTime="2026-02-27 08:54:18.472212343 +0000 UTC m=+1556.866614033" Feb 27 08:54:18 crc kubenswrapper[4906]: I0227 08:54:18.810308 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Feb 27 08:54:19 crc kubenswrapper[4906]: I0227 08:54:19.238549 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 27 08:54:19 crc kubenswrapper[4906]: I0227 08:54:19.238611 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Feb 27 08:54:23 crc kubenswrapper[4906]: I0227 08:54:23.810463 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Feb 27 08:54:23 crc kubenswrapper[4906]: I0227 08:54:23.849468 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.238554 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/nova-metadata-0" Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.238620 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.576996 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.845012 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.845115 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.845189 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.846366 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"400ed111d9890552c3d11872a2da1327403afcb99497b3740757d1aed8ae0dbb"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:54:24 crc kubenswrapper[4906]: I0227 08:54:24.846445 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://400ed111d9890552c3d11872a2da1327403afcb99497b3740757d1aed8ae0dbb" gracePeriod=600 Feb 27 08:54:25 crc kubenswrapper[4906]: I0227 08:54:25.251313 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a8173ca1-28e6-49da-a3f4-4b8ebaf3e551" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.213:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 27 08:54:25 crc kubenswrapper[4906]: I0227 08:54:25.251366 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="a8173ca1-28e6-49da-a3f4-4b8ebaf3e551" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.213:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 27 08:54:25 crc kubenswrapper[4906]: I0227 08:54:25.553784 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="400ed111d9890552c3d11872a2da1327403afcb99497b3740757d1aed8ae0dbb" exitCode=0 Feb 27 08:54:25 crc kubenswrapper[4906]: I0227 08:54:25.553925 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"400ed111d9890552c3d11872a2da1327403afcb99497b3740757d1aed8ae0dbb"} Feb 27 08:54:25 crc kubenswrapper[4906]: I0227 
08:54:25.554028 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13"} Feb 27 08:54:25 crc kubenswrapper[4906]: I0227 08:54:25.555437 4906 scope.go:117] "RemoveContainer" containerID="5fcba02c6ff9fc89671410ba4e06ef0b888d1413d1b9d9a87dec063811640cb4" Feb 27 08:54:26 crc kubenswrapper[4906]: I0227 08:54:26.836332 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:54:26 crc kubenswrapper[4906]: I0227 08:54:26.836926 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Feb 27 08:54:27 crc kubenswrapper[4906]: I0227 08:54:27.854178 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="42ba6f7d-0f99-4b31-82bc-4366cec7c4a4" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.214:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Feb 27 08:54:27 crc kubenswrapper[4906]: I0227 08:54:27.854193 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="42ba6f7d-0f99-4b31-82bc-4366cec7c4a4" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.214:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Feb 27 08:54:28 crc kubenswrapper[4906]: I0227 08:54:28.586403 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Feb 27 08:54:34 crc kubenswrapper[4906]: I0227 08:54:34.248304 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 27 08:54:34 crc kubenswrapper[4906]: I0227 08:54:34.253650 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Feb 27 08:54:34 crc kubenswrapper[4906]: I0227 08:54:34.255931 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 27 08:54:34 crc kubenswrapper[4906]: I0227 08:54:34.676539 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Feb 27 08:54:36 crc kubenswrapper[4906]: I0227 08:54:36.843737 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 27 08:54:36 crc kubenswrapper[4906]: I0227 08:54:36.845419 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 27 08:54:36 crc kubenswrapper[4906]: I0227 08:54:36.845484 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Feb 27 08:54:36 crc kubenswrapper[4906]: I0227 08:54:36.856377 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 27 08:54:37 crc kubenswrapper[4906]: I0227 08:54:37.708507 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Feb 27 08:54:37 crc kubenswrapper[4906]: I0227 08:54:37.715289 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Feb 27 08:54:45 crc kubenswrapper[4906]: I0227 08:54:45.607158 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:54:46 crc kubenswrapper[4906]: I0227 
08:54:46.822997 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:54:51 crc kubenswrapper[4906]: I0227 08:54:51.366101 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerName="rabbitmq" containerID="cri-o://372fbf4be4107581f65f344e2a64dfbaa7aba41eff73f9f1bda2902cf736d938" gracePeriod=604795 Feb 27 08:54:51 crc kubenswrapper[4906]: I0227 08:54:51.948115 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="rabbitmq" containerID="cri-o://c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92" gracePeriod=604795 Feb 27 08:54:54 crc kubenswrapper[4906]: I0227 08:54:54.172628 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.102:5671: connect: connection refused" Feb 27 08:54:54 crc kubenswrapper[4906]: I0227 08:54:54.507710 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.103:5671: connect: connection refused" Feb 27 08:54:57 crc kubenswrapper[4906]: I0227 08:54:57.935862 4906 generic.go:334] "Generic (PLEG): container finished" podID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerID="372fbf4be4107581f65f344e2a64dfbaa7aba41eff73f9f1bda2902cf736d938" exitCode=0 Feb 27 08:54:57 crc kubenswrapper[4906]: I0227 08:54:57.936361 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"288d23ef-ae52-4275-a827-ebf77b2823ea","Type":"ContainerDied","Data":"372fbf4be4107581f65f344e2a64dfbaa7aba41eff73f9f1bda2902cf736d938"} Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.052172 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.203797 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-confd\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.203841 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-server-conf\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.203938 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-erlang-cookie\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.203988 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/288d23ef-ae52-4275-a827-ebf77b2823ea-pod-info\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204060 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-tls\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204103 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/288d23ef-ae52-4275-a827-ebf77b2823ea-erlang-cookie-secret\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204143 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-plugins\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204163 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204228 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-75rbj\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-kube-api-access-75rbj\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204268 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-config-data\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: 
\"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204293 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-plugins-conf\") pod \"288d23ef-ae52-4275-a827-ebf77b2823ea\" (UID: \"288d23ef-ae52-4275-a827-ebf77b2823ea\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.204704 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.205025 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.210496 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.212079 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.215578 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage09-crc" (OuterVolumeSpecName: "persistence") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "local-storage09-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.229141 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/288d23ef-ae52-4275-a827-ebf77b2823ea-pod-info" (OuterVolumeSpecName: "pod-info") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.231117 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-kube-api-access-75rbj" (OuterVolumeSpecName: "kube-api-access-75rbj") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "kube-api-access-75rbj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.245183 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/288d23ef-ae52-4275-a827-ebf77b2823ea-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.249492 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-config-data" (OuterVolumeSpecName: "config-data") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307159 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307201 4906 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307214 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307226 4906 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/288d23ef-ae52-4275-a827-ebf77b2823ea-pod-info\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307237 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307249 4906 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/288d23ef-ae52-4275-a827-ebf77b2823ea-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307261 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307308 4906 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.307322 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-75rbj\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-kube-api-access-75rbj\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.312136 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-server-conf" (OuterVolumeSpecName: "server-conf") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.329476 4906 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage09-crc" (UniqueName: "kubernetes.io/local-volume/local-storage09-crc") on node "crc" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.357113 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "288d23ef-ae52-4275-a827-ebf77b2823ea" (UID: "288d23ef-ae52-4275-a827-ebf77b2823ea"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.415520 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/288d23ef-ae52-4275-a827-ebf77b2823ea-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.415561 4906 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/288d23ef-ae52-4275-a827-ebf77b2823ea-server-conf\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.415572 4906 reconciler_common.go:293] "Volume detached for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.537347 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.627638 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.641811 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-server-conf\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.642194 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-tls\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.642447 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-erlang-cookie\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.642592 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/752c995e-5d01-4705-ab26-be06da61290d-pod-info\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.642671 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/752c995e-5d01-4705-ab26-be06da61290d-erlang-cookie-secret\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.642812 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-config-data\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.642945 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ckxnq\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-kube-api-access-ckxnq\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.643078 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-plugins-conf\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.643218 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-plugins\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: 
\"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.644136 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-confd\") pod \"752c995e-5d01-4705-ab26-be06da61290d\" (UID: \"752c995e-5d01-4705-ab26-be06da61290d\") " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.643105 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.643918 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.649195 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/752c995e-5d01-4705-ab26-be06da61290d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.649216 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.649316 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "local-storage01-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.656553 4906 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.656613 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.656649 4906 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/752c995e-5d01-4705-ab26-be06da61290d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.656667 4906 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.656689 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.657045 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.661164 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-kube-api-access-ckxnq" (OuterVolumeSpecName: "kube-api-access-ckxnq") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "kube-api-access-ckxnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.664410 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/752c995e-5d01-4705-ab26-be06da61290d-pod-info" (OuterVolumeSpecName: "pod-info") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.691659 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-config-data" (OuterVolumeSpecName: "config-data") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.706726 4906 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.720809 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-server-conf" (OuterVolumeSpecName: "server-conf") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.758725 4906 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/752c995e-5d01-4705-ab26-be06da61290d-pod-info\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.758777 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.758805 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ckxnq\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-kube-api-access-ckxnq\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.758817 4906 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.758827 4906 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/752c995e-5d01-4705-ab26-be06da61290d-server-conf\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.758840 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.784409 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "752c995e-5d01-4705-ab26-be06da61290d" (UID: "752c995e-5d01-4705-ab26-be06da61290d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.861616 4906 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/752c995e-5d01-4705-ab26-be06da61290d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.949896 4906 generic.go:334] "Generic (PLEG): container finished" podID="752c995e-5d01-4705-ab26-be06da61290d" containerID="c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92" exitCode=0 Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.949999 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.950020 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"752c995e-5d01-4705-ab26-be06da61290d","Type":"ContainerDied","Data":"c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92"} Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.950101 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"752c995e-5d01-4705-ab26-be06da61290d","Type":"ContainerDied","Data":"0b3848af1ef8ad5f5f2bdeb7e8c7d4586e9e2122de9b070dfc1a9f601d2323fd"} Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.950122 4906 scope.go:117] "RemoveContainer" containerID="c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.955224 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"288d23ef-ae52-4275-a827-ebf77b2823ea","Type":"ContainerDied","Data":"c98fa7af7488ca0cd14703aae49587e30fca7342836ae3af8ebb8b61ed959025"} Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.955355 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 27 08:54:58 crc kubenswrapper[4906]: I0227 08:54:58.986064 4906 scope.go:117] "RemoveContainer" containerID="6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.004480 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.022015 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.026260 4906 scope.go:117] "RemoveContainer" containerID="c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92" Feb 27 08:54:59 crc kubenswrapper[4906]: E0227 08:54:59.028698 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92\": container with ID starting with c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92 not found: ID does not exist" containerID="c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.028775 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92"} err="failed to get container status \"c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92\": rpc error: code = NotFound desc = could not find container \"c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92\": container with ID starting with c55e45eebfb36463a729e750878b578c65a99b1c90cf40c11b9d855a051e1b92 not found: ID does not exist" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.028818 4906 scope.go:117] "RemoveContainer" containerID="6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690" Feb 27 08:54:59 crc kubenswrapper[4906]: E0227 08:54:59.031188 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690\": container with ID starting with 
6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690 not found: ID does not exist" containerID="6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.031241 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690"} err="failed to get container status \"6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690\": rpc error: code = NotFound desc = could not find container \"6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690\": container with ID starting with 6767fed6a0f4f2867823c279799ee9b4addfbdd810330c7bd2d488d2b5313690 not found: ID does not exist" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.031277 4906 scope.go:117] "RemoveContainer" containerID="372fbf4be4107581f65f344e2a64dfbaa7aba41eff73f9f1bda2902cf736d938" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.045148 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.086578 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.092557 4906 scope.go:117] "RemoveContainer" containerID="648e3776fe4d7a47348f9297e5b57825d7f70d4d2e766a296dfac824e916eb9b" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.104076 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: E0227 08:54:59.104752 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="setup-container" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.104775 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="setup-container" Feb 27 08:54:59 crc kubenswrapper[4906]: E0227 08:54:59.104814 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerName="rabbitmq" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.104824 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerName="rabbitmq" Feb 27 08:54:59 crc kubenswrapper[4906]: E0227 08:54:59.104843 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="rabbitmq" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.104851 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="rabbitmq" Feb 27 08:54:59 crc kubenswrapper[4906]: E0227 08:54:59.104871 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerName="setup-container" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.104901 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerName="setup-container" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.105216 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="752c995e-5d01-4705-ab26-be06da61290d" containerName="rabbitmq" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.105239 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" containerName="rabbitmq" Feb 27 
08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.106749 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.111186 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.111238 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.111411 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.111450 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.111528 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.114573 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.114610 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-kt5pn" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.116056 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.119380 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.123976 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.124313 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.125428 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.125705 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.126203 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.126761 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.126813 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-p2xch" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.126967 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.138537 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.182620 4906 scope.go:117] "RemoveContainer" containerID="2b4e6472638d54358dd9429dcca0ac3b90e620e5c251cc657ca9849f7e1bc0a4" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276193 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"server-conf\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276261 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276303 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276427 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276470 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276485 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/514e9bcd-1026-4d1c-a641-ce105057f1bf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276510 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276530 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-config-data\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276548 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zczg5\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-kube-api-access-zczg5\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276582 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276605 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276625 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276644 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276660 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgdhl\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-kube-api-access-rgdhl\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276678 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276703 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ca80b5ea-d488-457b-b5f7-1be76770223e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276737 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276759 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276795 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/ca80b5ea-d488-457b-b5f7-1be76770223e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276814 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276830 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.276848 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/514e9bcd-1026-4d1c-a641-ce105057f1bf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378137 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378302 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/514e9bcd-1026-4d1c-a641-ce105057f1bf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378408 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378489 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-config-data\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378562 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zczg5\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-kube-api-access-zczg5\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378693 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: 
\"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378774 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378846 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.378931 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgdhl\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-kube-api-access-rgdhl\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379008 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379077 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379149 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ca80b5ea-d488-457b-b5f7-1be76770223e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379220 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379291 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379394 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ca80b5ea-d488-457b-b5f7-1be76770223e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379469 4906 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379537 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379597 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/514e9bcd-1026-4d1c-a641-ce105057f1bf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379683 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379769 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379842 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.379935 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.380440 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.381525 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.382245 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-config-data\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " 
pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.385857 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/514e9bcd-1026-4d1c-a641-ce105057f1bf-pod-info\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.386804 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.387410 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.387852 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.389131 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/514e9bcd-1026-4d1c-a641-ce105057f1bf-server-conf\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.389242 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.390286 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.390765 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.390831 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.391344 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: 
\"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.391861 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/514e9bcd-1026-4d1c-a641-ce105057f1bf-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.392366 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/ca80b5ea-d488-457b-b5f7-1be76770223e-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.392686 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.393629 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/ca80b5ea-d488-457b-b5f7-1be76770223e-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.394808 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/ca80b5ea-d488-457b-b5f7-1be76770223e-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.397811 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/ca80b5ea-d488-457b-b5f7-1be76770223e-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.400115 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.400801 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zczg5\" (UniqueName: \"kubernetes.io/projected/514e9bcd-1026-4d1c-a641-ce105057f1bf-kube-api-access-zczg5\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.413636 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgdhl\" (UniqueName: \"kubernetes.io/projected/ca80b5ea-d488-457b-b5f7-1be76770223e-kube-api-access-rgdhl\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 
crc kubenswrapper[4906]: I0227 08:54:59.436282 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"ca80b5ea-d488-457b-b5f7-1be76770223e\") " pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.443112 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"rabbitmq-server-0\" (UID: \"514e9bcd-1026-4d1c-a641-ce105057f1bf\") " pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.454250 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.469727 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:54:59 crc kubenswrapper[4906]: I0227 08:54:59.967680 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.060731 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.519340 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7877c86b6c-rq4wd"] Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.522077 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.526123 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.535630 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7877c86b6c-rq4wd"] Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.569052 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="288d23ef-ae52-4275-a827-ebf77b2823ea" path="/var/lib/kubelet/pods/288d23ef-ae52-4275-a827-ebf77b2823ea/volumes" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.570012 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="752c995e-5d01-4705-ab26-be06da61290d" path="/var/lib/kubelet/pods/752c995e-5d01-4705-ab26-be06da61290d/volumes" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.623655 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-config\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.623740 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-svc\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.623797 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-swift-storage-0\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.623844 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-nb\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.623872 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-sb\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.623919 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gff2\" (UniqueName: \"kubernetes.io/projected/9c43a621-f488-491d-939d-1721b61c706a-kube-api-access-7gff2\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.624019 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.725158 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.725320 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-config\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726029 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-svc\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726341 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-svc\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726345 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" 
(UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-openstack-edpm-ipam\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726352 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-config\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726408 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-swift-storage-0\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726597 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-nb\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726692 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-sb\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.726754 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gff2\" (UniqueName: \"kubernetes.io/projected/9c43a621-f488-491d-939d-1721b61c706a-kube-api-access-7gff2\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.727219 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-swift-storage-0\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.727333 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-nb\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.727541 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-sb\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.750524 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gff2\" (UniqueName: 
\"kubernetes.io/projected/9c43a621-f488-491d-939d-1721b61c706a-kube-api-access-7gff2\") pod \"dnsmasq-dns-7877c86b6c-rq4wd\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.842999 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.981927 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"514e9bcd-1026-4d1c-a641-ce105057f1bf","Type":"ContainerStarted","Data":"2790cf54df5ecccd9a5de20e33d020b43d51b3dc56cb52b51e1ed172a9ab493d"} Feb 27 08:55:00 crc kubenswrapper[4906]: I0227 08:55:00.984530 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ca80b5ea-d488-457b-b5f7-1be76770223e","Type":"ContainerStarted","Data":"56ca3af8f63503004630adaa0e651e7234fc87ce7cf07c330ea08c6b30558163"} Feb 27 08:55:01 crc kubenswrapper[4906]: I0227 08:55:01.356096 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7877c86b6c-rq4wd"] Feb 27 08:55:01 crc kubenswrapper[4906]: I0227 08:55:01.996799 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" event={"ID":"9c43a621-f488-491d-939d-1721b61c706a","Type":"ContainerStarted","Data":"1eefdf55b157fc21b2fce2bb84dbf647852a5223a6fdb7b1a07ce585949e5676"} Feb 27 08:55:03 crc kubenswrapper[4906]: I0227 08:55:03.008357 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ca80b5ea-d488-457b-b5f7-1be76770223e","Type":"ContainerStarted","Data":"f107f6e120fd55a6cfda1482833245971a2ec35a3aa18ca09ad6260ddd4e3316"} Feb 27 08:55:03 crc kubenswrapper[4906]: I0227 08:55:03.011434 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c43a621-f488-491d-939d-1721b61c706a" containerID="c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed" exitCode=0 Feb 27 08:55:03 crc kubenswrapper[4906]: I0227 08:55:03.011563 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" event={"ID":"9c43a621-f488-491d-939d-1721b61c706a","Type":"ContainerDied","Data":"c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed"} Feb 27 08:55:03 crc kubenswrapper[4906]: I0227 08:55:03.013740 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"514e9bcd-1026-4d1c-a641-ce105057f1bf","Type":"ContainerStarted","Data":"0e133d0896636aa5413f8924450c362d8a73c7eec3287045b188f8457abb162a"} Feb 27 08:55:04 crc kubenswrapper[4906]: I0227 08:55:04.027649 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" event={"ID":"9c43a621-f488-491d-939d-1721b61c706a","Type":"ContainerStarted","Data":"b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166"} Feb 27 08:55:04 crc kubenswrapper[4906]: I0227 08:55:04.066659 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" podStartSLOduration=4.066625053 podStartE2EDuration="4.066625053s" podCreationTimestamp="2026-02-27 08:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:55:04.051846465 +0000 UTC m=+1602.446248075" watchObservedRunningTime="2026-02-27 08:55:04.066625053 +0000 UTC 
m=+1602.461026663" Feb 27 08:55:05 crc kubenswrapper[4906]: I0227 08:55:05.041448 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:10 crc kubenswrapper[4906]: I0227 08:55:10.845152 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:10 crc kubenswrapper[4906]: I0227 08:55:10.934419 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85649f948c-rhmfs"] Feb 27 08:55:10 crc kubenswrapper[4906]: I0227 08:55:10.934978 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" podUID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerName="dnsmasq-dns" containerID="cri-o://ccb5f712a19a1f8e866e4a777195e5252c2425b1dcb6dc5b7a9ae3044da1e04e" gracePeriod=10 Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.090893 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6fb577f7d7-q2mns"] Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.093204 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.102855 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fb577f7d7-q2mns"] Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.106834 4906 generic.go:334] "Generic (PLEG): container finished" podID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerID="ccb5f712a19a1f8e866e4a777195e5252c2425b1dcb6dc5b7a9ae3044da1e04e" exitCode=0 Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.106943 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" event={"ID":"77e1c833-7b25-4d3d-bd7a-d24f619d4966","Type":"ContainerDied","Data":"ccb5f712a19a1f8e866e4a777195e5252c2425b1dcb6dc5b7a9ae3044da1e04e"} Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.168119 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.168208 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4khf\" (UniqueName: \"kubernetes.io/projected/4af634eb-0270-43d7-bd3a-20cbde94f1f9-kube-api-access-m4khf\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.168249 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-dns-swift-storage-0\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.168275 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-openstack-edpm-ipam\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: 
\"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.168296 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-dns-svc\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.168358 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-config\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.168401 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-ovsdbserver-sb\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.274406 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-dns-swift-storage-0\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.274507 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-openstack-edpm-ipam\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.274583 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-dns-svc\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.274903 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-config\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.275086 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-ovsdbserver-sb\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.275318 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: 
\"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.275507 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4khf\" (UniqueName: \"kubernetes.io/projected/4af634eb-0270-43d7-bd3a-20cbde94f1f9-kube-api-access-m4khf\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.282299 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-dns-swift-storage-0\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.283246 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-config\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.283967 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-dns-svc\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.284679 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-ovsdbserver-sb\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.284824 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-ovsdbserver-nb\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.279859 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/4af634eb-0270-43d7-bd3a-20cbde94f1f9-openstack-edpm-ipam\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.323346 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4khf\" (UniqueName: \"kubernetes.io/projected/4af634eb-0270-43d7-bd3a-20cbde94f1f9-kube-api-access-m4khf\") pod \"dnsmasq-dns-6fb577f7d7-q2mns\" (UID: \"4af634eb-0270-43d7-bd3a-20cbde94f1f9\") " pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.454101 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.564472 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.684035 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-nb\") pod \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.684104 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22ppv\" (UniqueName: \"kubernetes.io/projected/77e1c833-7b25-4d3d-bd7a-d24f619d4966-kube-api-access-22ppv\") pod \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.684128 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-config\") pod \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.684261 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-sb\") pod \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.684344 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-swift-storage-0\") pod \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.684658 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-svc\") pod \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\" (UID: \"77e1c833-7b25-4d3d-bd7a-d24f619d4966\") " Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.698705 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77e1c833-7b25-4d3d-bd7a-d24f619d4966-kube-api-access-22ppv" (OuterVolumeSpecName: "kube-api-access-22ppv") pod "77e1c833-7b25-4d3d-bd7a-d24f619d4966" (UID: "77e1c833-7b25-4d3d-bd7a-d24f619d4966"). InnerVolumeSpecName "kube-api-access-22ppv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.746710 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "77e1c833-7b25-4d3d-bd7a-d24f619d4966" (UID: "77e1c833-7b25-4d3d-bd7a-d24f619d4966"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.749502 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "77e1c833-7b25-4d3d-bd7a-d24f619d4966" (UID: "77e1c833-7b25-4d3d-bd7a-d24f619d4966"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.754381 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "77e1c833-7b25-4d3d-bd7a-d24f619d4966" (UID: "77e1c833-7b25-4d3d-bd7a-d24f619d4966"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.757840 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-config" (OuterVolumeSpecName: "config") pod "77e1c833-7b25-4d3d-bd7a-d24f619d4966" (UID: "77e1c833-7b25-4d3d-bd7a-d24f619d4966"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.758589 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "77e1c833-7b25-4d3d-bd7a-d24f619d4966" (UID: "77e1c833-7b25-4d3d-bd7a-d24f619d4966"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.787496 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.787532 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22ppv\" (UniqueName: \"kubernetes.io/projected/77e1c833-7b25-4d3d-bd7a-d24f619d4966-kube-api-access-22ppv\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.787547 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.787560 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.787570 4906 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.787580 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/77e1c833-7b25-4d3d-bd7a-d24f619d4966-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:11 crc kubenswrapper[4906]: I0227 08:55:11.957794 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6fb577f7d7-q2mns"] Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.128802 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" event={"ID":"77e1c833-7b25-4d3d-bd7a-d24f619d4966","Type":"ContainerDied","Data":"fc52f0af8659db6b46695fb31f4744f07e5d64aafd97471aeac65142ebdba4e0"} Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.128936 4906 scope.go:117] "RemoveContainer" 
containerID="ccb5f712a19a1f8e866e4a777195e5252c2425b1dcb6dc5b7a9ae3044da1e04e" Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.128851 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85649f948c-rhmfs" Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.130596 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" event={"ID":"4af634eb-0270-43d7-bd3a-20cbde94f1f9","Type":"ContainerStarted","Data":"42271192f9bccf97f63620839a983ea08ac8831eddc2c95ae0c8280f41e7d59f"} Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.185444 4906 scope.go:117] "RemoveContainer" containerID="8fa831683e89f27f7d0c5c2f71ad0f2020ae19b014e010d61ca4c141750d686b" Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.217966 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85649f948c-rhmfs"] Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.230584 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85649f948c-rhmfs"] Feb 27 08:55:12 crc kubenswrapper[4906]: I0227 08:55:12.565220 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" path="/var/lib/kubelet/pods/77e1c833-7b25-4d3d-bd7a-d24f619d4966/volumes" Feb 27 08:55:13 crc kubenswrapper[4906]: I0227 08:55:13.145118 4906 generic.go:334] "Generic (PLEG): container finished" podID="4af634eb-0270-43d7-bd3a-20cbde94f1f9" containerID="55eee1c63ddd97c9430a93ae7d5692bbaf75473483ea687761a0a76dba7d02e5" exitCode=0 Feb 27 08:55:13 crc kubenswrapper[4906]: I0227 08:55:13.145233 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" event={"ID":"4af634eb-0270-43d7-bd3a-20cbde94f1f9","Type":"ContainerDied","Data":"55eee1c63ddd97c9430a93ae7d5692bbaf75473483ea687761a0a76dba7d02e5"} Feb 27 08:55:14 crc kubenswrapper[4906]: I0227 08:55:14.168370 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" event={"ID":"4af634eb-0270-43d7-bd3a-20cbde94f1f9","Type":"ContainerStarted","Data":"afdd532614d4830967829ee77afbb1fa9bd661c2ad17f74b17a11433e991cc2a"} Feb 27 08:55:14 crc kubenswrapper[4906]: I0227 08:55:14.169094 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:14 crc kubenswrapper[4906]: I0227 08:55:14.208103 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" podStartSLOduration=3.208069239 podStartE2EDuration="3.208069239s" podCreationTimestamp="2026-02-27 08:55:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:55:14.192545301 +0000 UTC m=+1612.586946921" watchObservedRunningTime="2026-02-27 08:55:14.208069239 +0000 UTC m=+1612.602470889" Feb 27 08:55:21 crc kubenswrapper[4906]: I0227 08:55:21.456056 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6fb577f7d7-q2mns" Feb 27 08:55:21 crc kubenswrapper[4906]: I0227 08:55:21.518731 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7877c86b6c-rq4wd"] Feb 27 08:55:21 crc kubenswrapper[4906]: I0227 08:55:21.519541 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" podUID="9c43a621-f488-491d-939d-1721b61c706a" 
containerName="dnsmasq-dns" containerID="cri-o://b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166" gracePeriod=10 Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.032266 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.166773 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gff2\" (UniqueName: \"kubernetes.io/projected/9c43a621-f488-491d-939d-1721b61c706a-kube-api-access-7gff2\") pod \"9c43a621-f488-491d-939d-1721b61c706a\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.168603 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-swift-storage-0\") pod \"9c43a621-f488-491d-939d-1721b61c706a\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.168719 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-svc\") pod \"9c43a621-f488-491d-939d-1721b61c706a\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.169254 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-openstack-edpm-ipam\") pod \"9c43a621-f488-491d-939d-1721b61c706a\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.169316 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-config\") pod \"9c43a621-f488-491d-939d-1721b61c706a\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.169486 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-sb\") pod \"9c43a621-f488-491d-939d-1721b61c706a\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.169585 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-nb\") pod \"9c43a621-f488-491d-939d-1721b61c706a\" (UID: \"9c43a621-f488-491d-939d-1721b61c706a\") " Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.175934 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c43a621-f488-491d-939d-1721b61c706a-kube-api-access-7gff2" (OuterVolumeSpecName: "kube-api-access-7gff2") pod "9c43a621-f488-491d-939d-1721b61c706a" (UID: "9c43a621-f488-491d-939d-1721b61c706a"). InnerVolumeSpecName "kube-api-access-7gff2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.233181 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9c43a621-f488-491d-939d-1721b61c706a" (UID: "9c43a621-f488-491d-939d-1721b61c706a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.234211 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9c43a621-f488-491d-939d-1721b61c706a" (UID: "9c43a621-f488-491d-939d-1721b61c706a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.237108 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9c43a621-f488-491d-939d-1721b61c706a" (UID: "9c43a621-f488-491d-939d-1721b61c706a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.237655 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-config" (OuterVolumeSpecName: "config") pod "9c43a621-f488-491d-939d-1721b61c706a" (UID: "9c43a621-f488-491d-939d-1721b61c706a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.253311 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "9c43a621-f488-491d-939d-1721b61c706a" (UID: "9c43a621-f488-491d-939d-1721b61c706a"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.255272 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9c43a621-f488-491d-939d-1721b61c706a" (UID: "9c43a621-f488-491d-939d-1721b61c706a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.260744 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c43a621-f488-491d-939d-1721b61c706a" containerID="b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166" exitCode=0 Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.260840 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.260858 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" event={"ID":"9c43a621-f488-491d-939d-1721b61c706a","Type":"ContainerDied","Data":"b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166"} Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.261377 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7877c86b6c-rq4wd" event={"ID":"9c43a621-f488-491d-939d-1721b61c706a","Type":"ContainerDied","Data":"1eefdf55b157fc21b2fce2bb84dbf647852a5223a6fdb7b1a07ce585949e5676"} Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.261401 4906 scope.go:117] "RemoveContainer" containerID="b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.277651 4906 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.277720 4906 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-dns-svc\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.277734 4906 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.277745 4906 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-config\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.277761 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.277771 4906 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9c43a621-f488-491d-939d-1721b61c706a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.277808 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gff2\" (UniqueName: \"kubernetes.io/projected/9c43a621-f488-491d-939d-1721b61c706a-kube-api-access-7gff2\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.337371 4906 scope.go:117] "RemoveContainer" containerID="c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.338340 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7877c86b6c-rq4wd"] Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.349764 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7877c86b6c-rq4wd"] Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.359821 4906 scope.go:117] "RemoveContainer" containerID="b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166" Feb 27 08:55:22 crc kubenswrapper[4906]: E0227 08:55:22.360663 4906 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166\": container with ID starting with b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166 not found: ID does not exist" containerID="b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.360694 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166"} err="failed to get container status \"b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166\": rpc error: code = NotFound desc = could not find container \"b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166\": container with ID starting with b4ab0c2c1d799c82ff35ee064f53107fca9f34af8532ef1b373820db04559166 not found: ID does not exist" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.360717 4906 scope.go:117] "RemoveContainer" containerID="c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed" Feb 27 08:55:22 crc kubenswrapper[4906]: E0227 08:55:22.361076 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed\": container with ID starting with c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed not found: ID does not exist" containerID="c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.361107 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed"} err="failed to get container status \"c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed\": rpc error: code = NotFound desc = could not find container \"c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed\": container with ID starting with c2ff737f39b3ebd832586e58d85ba102a96884e1517abda35029a9164e3d30ed not found: ID does not exist" Feb 27 08:55:22 crc kubenswrapper[4906]: I0227 08:55:22.579152 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c43a621-f488-491d-939d-1721b61c706a" path="/var/lib/kubelet/pods/9c43a621-f488-491d-939d-1721b61c706a/volumes" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.547138 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l"] Feb 27 08:55:34 crc kubenswrapper[4906]: E0227 08:55:34.548449 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerName="init" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.548466 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerName="init" Feb 27 08:55:34 crc kubenswrapper[4906]: E0227 08:55:34.548480 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerName="dnsmasq-dns" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.548487 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerName="dnsmasq-dns" Feb 27 08:55:34 crc kubenswrapper[4906]: E0227 08:55:34.548496 4906 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="9c43a621-f488-491d-939d-1721b61c706a" containerName="init" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.548503 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c43a621-f488-491d-939d-1721b61c706a" containerName="init" Feb 27 08:55:34 crc kubenswrapper[4906]: E0227 08:55:34.548535 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c43a621-f488-491d-939d-1721b61c706a" containerName="dnsmasq-dns" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.548541 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c43a621-f488-491d-939d-1721b61c706a" containerName="dnsmasq-dns" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.548755 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="77e1c833-7b25-4d3d-bd7a-d24f619d4966" containerName="dnsmasq-dns" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.548776 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c43a621-f488-491d-939d-1721b61c706a" containerName="dnsmasq-dns" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.549619 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.553280 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.553289 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.553485 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.553656 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.567674 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l"] Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.653444 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.654159 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.654266 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" 
Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.654470 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qslpq\" (UniqueName: \"kubernetes.io/projected/878d7d32-bc60-4edf-aa59-82548a53fe4c-kube-api-access-qslpq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.757650 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.757825 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.757911 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.757968 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qslpq\" (UniqueName: \"kubernetes.io/projected/878d7d32-bc60-4edf-aa59-82548a53fe4c-kube-api-access-qslpq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.774949 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-ssh-key-openstack-edpm-ipam\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.775077 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.776129 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " 
pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.779093 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qslpq\" (UniqueName: \"kubernetes.io/projected/878d7d32-bc60-4edf-aa59-82548a53fe4c-kube-api-access-qslpq\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:34 crc kubenswrapper[4906]: I0227 08:55:34.988161 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:35 crc kubenswrapper[4906]: I0227 08:55:35.392153 4906 generic.go:334] "Generic (PLEG): container finished" podID="514e9bcd-1026-4d1c-a641-ce105057f1bf" containerID="0e133d0896636aa5413f8924450c362d8a73c7eec3287045b188f8457abb162a" exitCode=0 Feb 27 08:55:35 crc kubenswrapper[4906]: I0227 08:55:35.392297 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"514e9bcd-1026-4d1c-a641-ce105057f1bf","Type":"ContainerDied","Data":"0e133d0896636aa5413f8924450c362d8a73c7eec3287045b188f8457abb162a"} Feb 27 08:55:35 crc kubenswrapper[4906]: I0227 08:55:35.394669 4906 generic.go:334] "Generic (PLEG): container finished" podID="ca80b5ea-d488-457b-b5f7-1be76770223e" containerID="f107f6e120fd55a6cfda1482833245971a2ec35a3aa18ca09ad6260ddd4e3316" exitCode=0 Feb 27 08:55:35 crc kubenswrapper[4906]: I0227 08:55:35.394699 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ca80b5ea-d488-457b-b5f7-1be76770223e","Type":"ContainerDied","Data":"f107f6e120fd55a6cfda1482833245971a2ec35a3aa18ca09ad6260ddd4e3316"} Feb 27 08:55:35 crc kubenswrapper[4906]: I0227 08:55:35.633616 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l"] Feb 27 08:55:35 crc kubenswrapper[4906]: W0227 08:55:35.640591 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod878d7d32_bc60_4edf_aa59_82548a53fe4c.slice/crio-a31f8900506969dc976d5c1dc7435e6e115b905d00c84074c12d3c0e92c7ea19 WatchSource:0}: Error finding container a31f8900506969dc976d5c1dc7435e6e115b905d00c84074c12d3c0e92c7ea19: Status 404 returned error can't find the container with id a31f8900506969dc976d5c1dc7435e6e115b905d00c84074c12d3c0e92c7ea19 Feb 27 08:55:36 crc kubenswrapper[4906]: I0227 08:55:36.414803 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"514e9bcd-1026-4d1c-a641-ce105057f1bf","Type":"ContainerStarted","Data":"caff15cb72396cf9d28b9639332443cf1e2de03ab0dde9bee9a3a1f4c8d5a210"} Feb 27 08:55:36 crc kubenswrapper[4906]: I0227 08:55:36.416324 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Feb 27 08:55:36 crc kubenswrapper[4906]: I0227 08:55:36.419140 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"ca80b5ea-d488-457b-b5f7-1be76770223e","Type":"ContainerStarted","Data":"e50aaa8de2e0434be73bef10b19beebdfa7e34e12a94b2d615d08f0ecbd37737"} Feb 27 08:55:36 crc kubenswrapper[4906]: I0227 08:55:36.419809 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:55:36 crc 
kubenswrapper[4906]: I0227 08:55:36.421169 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" event={"ID":"878d7d32-bc60-4edf-aa59-82548a53fe4c","Type":"ContainerStarted","Data":"a31f8900506969dc976d5c1dc7435e6e115b905d00c84074c12d3c0e92c7ea19"} Feb 27 08:55:36 crc kubenswrapper[4906]: I0227 08:55:36.460343 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=38.460315155 podStartE2EDuration="38.460315155s" podCreationTimestamp="2026-02-27 08:54:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:55:36.44677947 +0000 UTC m=+1634.841181090" watchObservedRunningTime="2026-02-27 08:55:36.460315155 +0000 UTC m=+1634.854716765" Feb 27 08:55:36 crc kubenswrapper[4906]: I0227 08:55:36.480805 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.480775523 podStartE2EDuration="37.480775523s" podCreationTimestamp="2026-02-27 08:54:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 08:55:36.479848748 +0000 UTC m=+1634.874250368" watchObservedRunningTime="2026-02-27 08:55:36.480775523 +0000 UTC m=+1634.875177133" Feb 27 08:55:46 crc kubenswrapper[4906]: I0227 08:55:46.533955 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" event={"ID":"878d7d32-bc60-4edf-aa59-82548a53fe4c","Type":"ContainerStarted","Data":"bff31bfebd0fb38872298164c859957aaea966c3995664ed48db2082a6f6d6bc"} Feb 27 08:55:46 crc kubenswrapper[4906]: I0227 08:55:46.577225 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" podStartSLOduration=2.6373815560000002 podStartE2EDuration="12.577198846s" podCreationTimestamp="2026-02-27 08:55:34 +0000 UTC" firstStartedPulling="2026-02-27 08:55:35.644828088 +0000 UTC m=+1634.039229708" lastFinishedPulling="2026-02-27 08:55:45.584645388 +0000 UTC m=+1643.979046998" observedRunningTime="2026-02-27 08:55:46.558529355 +0000 UTC m=+1644.952930975" watchObservedRunningTime="2026-02-27 08:55:46.577198846 +0000 UTC m=+1644.971600466" Feb 27 08:55:49 crc kubenswrapper[4906]: I0227 08:55:49.459445 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Feb 27 08:55:49 crc kubenswrapper[4906]: I0227 08:55:49.474168 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Feb 27 08:55:56 crc kubenswrapper[4906]: I0227 08:55:56.689419 4906 generic.go:334] "Generic (PLEG): container finished" podID="878d7d32-bc60-4edf-aa59-82548a53fe4c" containerID="bff31bfebd0fb38872298164c859957aaea966c3995664ed48db2082a6f6d6bc" exitCode=0 Feb 27 08:55:56 crc kubenswrapper[4906]: I0227 08:55:56.689523 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" event={"ID":"878d7d32-bc60-4edf-aa59-82548a53fe4c","Type":"ContainerDied","Data":"bff31bfebd0fb38872298164c859957aaea966c3995664ed48db2082a6f6d6bc"} Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.240594 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.243795 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-ssh-key-openstack-edpm-ipam\") pod \"878d7d32-bc60-4edf-aa59-82548a53fe4c\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.243923 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-repo-setup-combined-ca-bundle\") pod \"878d7d32-bc60-4edf-aa59-82548a53fe4c\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.243991 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qslpq\" (UniqueName: \"kubernetes.io/projected/878d7d32-bc60-4edf-aa59-82548a53fe4c-kube-api-access-qslpq\") pod \"878d7d32-bc60-4edf-aa59-82548a53fe4c\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.244054 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-inventory\") pod \"878d7d32-bc60-4edf-aa59-82548a53fe4c\" (UID: \"878d7d32-bc60-4edf-aa59-82548a53fe4c\") " Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.280015 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "878d7d32-bc60-4edf-aa59-82548a53fe4c" (UID: "878d7d32-bc60-4edf-aa59-82548a53fe4c"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.354330 4906 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.365764 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/878d7d32-bc60-4edf-aa59-82548a53fe4c-kube-api-access-qslpq" (OuterVolumeSpecName: "kube-api-access-qslpq") pod "878d7d32-bc60-4edf-aa59-82548a53fe4c" (UID: "878d7d32-bc60-4edf-aa59-82548a53fe4c"). InnerVolumeSpecName "kube-api-access-qslpq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.368988 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "878d7d32-bc60-4edf-aa59-82548a53fe4c" (UID: "878d7d32-bc60-4edf-aa59-82548a53fe4c"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.370481 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-inventory" (OuterVolumeSpecName: "inventory") pod "878d7d32-bc60-4edf-aa59-82548a53fe4c" (UID: "878d7d32-bc60-4edf-aa59-82548a53fe4c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.457080 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.457458 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qslpq\" (UniqueName: \"kubernetes.io/projected/878d7d32-bc60-4edf-aa59-82548a53fe4c-kube-api-access-qslpq\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.457561 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/878d7d32-bc60-4edf-aa59-82548a53fe4c-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.734274 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" event={"ID":"878d7d32-bc60-4edf-aa59-82548a53fe4c","Type":"ContainerDied","Data":"a31f8900506969dc976d5c1dc7435e6e115b905d00c84074c12d3c0e92c7ea19"} Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.734711 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a31f8900506969dc976d5c1dc7435e6e115b905d00c84074c12d3c0e92c7ea19" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.734668 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.906625 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8"] Feb 27 08:55:58 crc kubenswrapper[4906]: E0227 08:55:58.907167 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="878d7d32-bc60-4edf-aa59-82548a53fe4c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.907190 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="878d7d32-bc60-4edf-aa59-82548a53fe4c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.907381 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="878d7d32-bc60-4edf-aa59-82548a53fe4c" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.908150 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.913042 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.913316 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.913483 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.913580 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.925762 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8"] Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.970503 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.970639 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mggmc\" (UniqueName: \"kubernetes.io/projected/f0a22a37-97f6-42ad-ba15-d6c8a352d831-kube-api-access-mggmc\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:58 crc kubenswrapper[4906]: I0227 08:55:58.970960 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.072806 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.072867 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mggmc\" (UniqueName: \"kubernetes.io/projected/f0a22a37-97f6-42ad-ba15-d6c8a352d831-kube-api-access-mggmc\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.072943 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-ssh-key-openstack-edpm-ipam\") pod 
\"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.077628 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-ssh-key-openstack-edpm-ipam\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.077636 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.092603 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mggmc\" (UniqueName: \"kubernetes.io/projected/f0a22a37-97f6-42ad-ba15-d6c8a352d831-kube-api-access-mggmc\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-p5bw8\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.228242 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.434149 4906 scope.go:117] "RemoveContainer" containerID="c90b582ab85e0cdce4e8a8b87c0e89e34ac759cafeb76062c99ead1c13a7a5ef" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.516770 4906 scope.go:117] "RemoveContainer" containerID="043eef2a73f282b18bd7c046ef10a369c35da12a4fa980643dcecde7b720cba1" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.554798 4906 scope.go:117] "RemoveContainer" containerID="673a8338d5a8f6329be335e66d637dc9dc85411c9b14d49d5c5b8591c2c2d501" Feb 27 08:55:59 crc kubenswrapper[4906]: I0227 08:55:59.890265 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8"] Feb 27 08:55:59 crc kubenswrapper[4906]: W0227 08:55:59.897843 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf0a22a37_97f6_42ad_ba15_d6c8a352d831.slice/crio-26c884ae605e0bbff9973b407e900901733e12f654cf2d5735895a98d153d81d WatchSource:0}: Error finding container 26c884ae605e0bbff9973b407e900901733e12f654cf2d5735895a98d153d81d: Status 404 returned error can't find the container with id 26c884ae605e0bbff9973b407e900901733e12f654cf2d5735895a98d153d81d Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.149467 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536376-dbcs9"] Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.151272 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.153639 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.154473 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.155840 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.161258 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536376-dbcs9"] Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.201260 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7rsq\" (UniqueName: \"kubernetes.io/projected/a5a10e44-232f-494b-b06e-01dbcc0f8493-kube-api-access-l7rsq\") pod \"auto-csr-approver-29536376-dbcs9\" (UID: \"a5a10e44-232f-494b-b06e-01dbcc0f8493\") " pod="openshift-infra/auto-csr-approver-29536376-dbcs9" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.303474 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7rsq\" (UniqueName: \"kubernetes.io/projected/a5a10e44-232f-494b-b06e-01dbcc0f8493-kube-api-access-l7rsq\") pod \"auto-csr-approver-29536376-dbcs9\" (UID: \"a5a10e44-232f-494b-b06e-01dbcc0f8493\") " pod="openshift-infra/auto-csr-approver-29536376-dbcs9" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.335078 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7rsq\" (UniqueName: \"kubernetes.io/projected/a5a10e44-232f-494b-b06e-01dbcc0f8493-kube-api-access-l7rsq\") pod \"auto-csr-approver-29536376-dbcs9\" (UID: \"a5a10e44-232f-494b-b06e-01dbcc0f8493\") " pod="openshift-infra/auto-csr-approver-29536376-dbcs9" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.533940 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.774461 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" event={"ID":"f0a22a37-97f6-42ad-ba15-d6c8a352d831","Type":"ContainerStarted","Data":"f62fdecc2bb4a85d1dbb2a596c635f672afecdc936e1357831392388f15907ee"} Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.775004 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" event={"ID":"f0a22a37-97f6-42ad-ba15-d6c8a352d831","Type":"ContainerStarted","Data":"26c884ae605e0bbff9973b407e900901733e12f654cf2d5735895a98d153d81d"} Feb 27 08:56:00 crc kubenswrapper[4906]: I0227 08:56:00.799625 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" podStartSLOduration=2.351355111 podStartE2EDuration="2.799591623s" podCreationTimestamp="2026-02-27 08:55:58 +0000 UTC" firstStartedPulling="2026-02-27 08:55:59.902264966 +0000 UTC m=+1658.296666586" lastFinishedPulling="2026-02-27 08:56:00.350501488 +0000 UTC m=+1658.744903098" observedRunningTime="2026-02-27 08:56:00.795258259 +0000 UTC m=+1659.189659869" watchObservedRunningTime="2026-02-27 08:56:00.799591623 +0000 UTC m=+1659.193993233" Feb 27 08:56:01 crc kubenswrapper[4906]: W0227 08:56:01.041919 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda5a10e44_232f_494b_b06e_01dbcc0f8493.slice/crio-7dcd7ed4b5575ebc59ffa3516ce50bfe3e7614842a4c4c5ad80f8ae79e969bad WatchSource:0}: Error finding container 7dcd7ed4b5575ebc59ffa3516ce50bfe3e7614842a4c4c5ad80f8ae79e969bad: Status 404 returned error can't find the container with id 7dcd7ed4b5575ebc59ffa3516ce50bfe3e7614842a4c4c5ad80f8ae79e969bad Feb 27 08:56:01 crc kubenswrapper[4906]: I0227 08:56:01.045680 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536376-dbcs9"] Feb 27 08:56:01 crc kubenswrapper[4906]: I0227 08:56:01.786751 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" event={"ID":"a5a10e44-232f-494b-b06e-01dbcc0f8493","Type":"ContainerStarted","Data":"7dcd7ed4b5575ebc59ffa3516ce50bfe3e7614842a4c4c5ad80f8ae79e969bad"} Feb 27 08:56:02 crc kubenswrapper[4906]: I0227 08:56:02.802596 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" event={"ID":"a5a10e44-232f-494b-b06e-01dbcc0f8493","Type":"ContainerStarted","Data":"b19539382c9a1f6da8731edc6dc197a43ee30da3378623c2d2f0f72da2858b44"} Feb 27 08:56:02 crc kubenswrapper[4906]: I0227 08:56:02.826700 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" podStartSLOduration=1.4925081709999999 podStartE2EDuration="2.826666941s" podCreationTimestamp="2026-02-27 08:56:00 +0000 UTC" firstStartedPulling="2026-02-27 08:56:01.053002548 +0000 UTC m=+1659.447404168" lastFinishedPulling="2026-02-27 08:56:02.387161328 +0000 UTC m=+1660.781562938" observedRunningTime="2026-02-27 08:56:02.822584463 +0000 UTC m=+1661.216986073" watchObservedRunningTime="2026-02-27 08:56:02.826666941 +0000 UTC m=+1661.221068561" Feb 27 08:56:03 crc kubenswrapper[4906]: I0227 08:56:03.817027 4906 generic.go:334] "Generic (PLEG): container finished" 
podID="f0a22a37-97f6-42ad-ba15-d6c8a352d831" containerID="f62fdecc2bb4a85d1dbb2a596c635f672afecdc936e1357831392388f15907ee" exitCode=0 Feb 27 08:56:03 crc kubenswrapper[4906]: I0227 08:56:03.817153 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" event={"ID":"f0a22a37-97f6-42ad-ba15-d6c8a352d831","Type":"ContainerDied","Data":"f62fdecc2bb4a85d1dbb2a596c635f672afecdc936e1357831392388f15907ee"} Feb 27 08:56:03 crc kubenswrapper[4906]: I0227 08:56:03.821648 4906 generic.go:334] "Generic (PLEG): container finished" podID="a5a10e44-232f-494b-b06e-01dbcc0f8493" containerID="b19539382c9a1f6da8731edc6dc197a43ee30da3378623c2d2f0f72da2858b44" exitCode=0 Feb 27 08:56:03 crc kubenswrapper[4906]: I0227 08:56:03.821690 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" event={"ID":"a5a10e44-232f-494b-b06e-01dbcc0f8493","Type":"ContainerDied","Data":"b19539382c9a1f6da8731edc6dc197a43ee30da3378623c2d2f0f72da2858b44"} Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.333452 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.354422 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.421080 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-ssh-key-openstack-edpm-ipam\") pod \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.421163 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7rsq\" (UniqueName: \"kubernetes.io/projected/a5a10e44-232f-494b-b06e-01dbcc0f8493-kube-api-access-l7rsq\") pod \"a5a10e44-232f-494b-b06e-01dbcc0f8493\" (UID: \"a5a10e44-232f-494b-b06e-01dbcc0f8493\") " Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.421235 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mggmc\" (UniqueName: \"kubernetes.io/projected/f0a22a37-97f6-42ad-ba15-d6c8a352d831-kube-api-access-mggmc\") pod \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.421286 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-inventory\") pod \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\" (UID: \"f0a22a37-97f6-42ad-ba15-d6c8a352d831\") " Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.433540 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5a10e44-232f-494b-b06e-01dbcc0f8493-kube-api-access-l7rsq" (OuterVolumeSpecName: "kube-api-access-l7rsq") pod "a5a10e44-232f-494b-b06e-01dbcc0f8493" (UID: "a5a10e44-232f-494b-b06e-01dbcc0f8493"). InnerVolumeSpecName "kube-api-access-l7rsq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.450694 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f0a22a37-97f6-42ad-ba15-d6c8a352d831-kube-api-access-mggmc" (OuterVolumeSpecName: "kube-api-access-mggmc") pod "f0a22a37-97f6-42ad-ba15-d6c8a352d831" (UID: "f0a22a37-97f6-42ad-ba15-d6c8a352d831"). InnerVolumeSpecName "kube-api-access-mggmc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.458781 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f0a22a37-97f6-42ad-ba15-d6c8a352d831" (UID: "f0a22a37-97f6-42ad-ba15-d6c8a352d831"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.465238 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-inventory" (OuterVolumeSpecName: "inventory") pod "f0a22a37-97f6-42ad-ba15-d6c8a352d831" (UID: "f0a22a37-97f6-42ad-ba15-d6c8a352d831"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.522893 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.522923 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7rsq\" (UniqueName: \"kubernetes.io/projected/a5a10e44-232f-494b-b06e-01dbcc0f8493-kube-api-access-l7rsq\") on node \"crc\" DevicePath \"\"" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.522935 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mggmc\" (UniqueName: \"kubernetes.io/projected/f0a22a37-97f6-42ad-ba15-d6c8a352d831-kube-api-access-mggmc\") on node \"crc\" DevicePath \"\"" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.522946 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f0a22a37-97f6-42ad-ba15-d6c8a352d831-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.651896 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536370-qtrvs"] Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.662873 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536370-qtrvs"] Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.845039 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" event={"ID":"f0a22a37-97f6-42ad-ba15-d6c8a352d831","Type":"ContainerDied","Data":"26c884ae605e0bbff9973b407e900901733e12f654cf2d5735895a98d153d81d"} Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.845096 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="26c884ae605e0bbff9973b407e900901733e12f654cf2d5735895a98d153d81d" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.845097 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-p5bw8" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.847451 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" event={"ID":"a5a10e44-232f-494b-b06e-01dbcc0f8493","Type":"ContainerDied","Data":"7dcd7ed4b5575ebc59ffa3516ce50bfe3e7614842a4c4c5ad80f8ae79e969bad"} Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.847480 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7dcd7ed4b5575ebc59ffa3516ce50bfe3e7614842a4c4c5ad80f8ae79e969bad" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.847526 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536376-dbcs9" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.932564 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4"] Feb 27 08:56:05 crc kubenswrapper[4906]: E0227 08:56:05.933058 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f0a22a37-97f6-42ad-ba15-d6c8a352d831" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.933085 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f0a22a37-97f6-42ad-ba15-d6c8a352d831" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 27 08:56:05 crc kubenswrapper[4906]: E0227 08:56:05.933106 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5a10e44-232f-494b-b06e-01dbcc0f8493" containerName="oc" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.933115 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5a10e44-232f-494b-b06e-01dbcc0f8493" containerName="oc" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.933366 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5a10e44-232f-494b-b06e-01dbcc0f8493" containerName="oc" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.933393 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f0a22a37-97f6-42ad-ba15-d6c8a352d831" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.934540 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.942584 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.942798 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.942825 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.944257 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 08:56:05 crc kubenswrapper[4906]: I0227 08:56:05.980330 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4"] Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.040668 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.040791 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.040823 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.040861 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j6dm\" (UniqueName: \"kubernetes.io/projected/42f73c0e-3d0e-4ba2-aa05-c1547471b938-kube-api-access-8j6dm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.142363 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.142413 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.142461 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j6dm\" (UniqueName: \"kubernetes.io/projected/42f73c0e-3d0e-4ba2-aa05-c1547471b938-kube-api-access-8j6dm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.142589 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.146921 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-ssh-key-openstack-edpm-ipam\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.147132 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.148629 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.163421 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j6dm\" (UniqueName: \"kubernetes.io/projected/42f73c0e-3d0e-4ba2-aa05-c1547471b938-kube-api-access-8j6dm\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.255324 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.563866 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e984b25a-d687-4a9b-a92a-b5e55d37ce8f" path="/var/lib/kubelet/pods/e984b25a-d687-4a9b-a92a-b5e55d37ce8f/volumes" Feb 27 08:56:06 crc kubenswrapper[4906]: I0227 08:56:06.891588 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4"] Feb 27 08:56:07 crc kubenswrapper[4906]: I0227 08:56:07.878718 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" event={"ID":"42f73c0e-3d0e-4ba2-aa05-c1547471b938","Type":"ContainerStarted","Data":"5251561837ba0dedc79889db27bec5e65decffd8fa14cb20a4b8032cb15ccbc0"} Feb 27 08:56:09 crc kubenswrapper[4906]: I0227 08:56:09.898919 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" event={"ID":"42f73c0e-3d0e-4ba2-aa05-c1547471b938","Type":"ContainerStarted","Data":"174381d029c6b30ef5ae5e2b1e53eeda5c4df13f90b23e024cbaee8a039f60de"} Feb 27 08:56:09 crc kubenswrapper[4906]: I0227 08:56:09.930468 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" podStartSLOduration=3.481286358 podStartE2EDuration="4.930439867s" podCreationTimestamp="2026-02-27 08:56:05 +0000 UTC" firstStartedPulling="2026-02-27 08:56:06.900025839 +0000 UTC m=+1665.294427449" lastFinishedPulling="2026-02-27 08:56:08.349179348 +0000 UTC m=+1666.743580958" observedRunningTime="2026-02-27 08:56:09.922820777 +0000 UTC m=+1668.317222387" watchObservedRunningTime="2026-02-27 08:56:09.930439867 +0000 UTC m=+1668.324841497" Feb 27 08:56:54 crc kubenswrapper[4906]: I0227 08:56:54.845439 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:56:54 crc kubenswrapper[4906]: I0227 08:56:54.846545 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:56:59 crc kubenswrapper[4906]: I0227 08:56:59.738601 4906 scope.go:117] "RemoveContainer" containerID="0965d89b29776672733411111b048996321fea370c02c0d8fcbea29bebdbd88b" Feb 27 08:56:59 crc kubenswrapper[4906]: I0227 08:56:59.801479 4906 scope.go:117] "RemoveContainer" containerID="30ea85af047d20481ff3464d917fb9df1bef54d6d15e94b1e03be61c3be6f469" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.714323 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-b2x4g"] Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.728168 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.760598 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2x4g"] Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.847967 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-utilities\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.848047 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwcwz\" (UniqueName: \"kubernetes.io/projected/4ab694fc-89b6-4486-b3f6-c89b07591e5f-kube-api-access-zwcwz\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.848447 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-catalog-content\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.951250 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwcwz\" (UniqueName: \"kubernetes.io/projected/4ab694fc-89b6-4486-b3f6-c89b07591e5f-kube-api-access-zwcwz\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.951399 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-catalog-content\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.951468 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-utilities\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.952157 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-catalog-content\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.952227 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-utilities\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:16 crc kubenswrapper[4906]: I0227 08:57:16.975148 4906 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-zwcwz\" (UniqueName: \"kubernetes.io/projected/4ab694fc-89b6-4486-b3f6-c89b07591e5f-kube-api-access-zwcwz\") pod \"redhat-marketplace-b2x4g\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:17 crc kubenswrapper[4906]: I0227 08:57:17.073643 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:17 crc kubenswrapper[4906]: I0227 08:57:17.612733 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2x4g"] Feb 27 08:57:17 crc kubenswrapper[4906]: I0227 08:57:17.759453 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2x4g" event={"ID":"4ab694fc-89b6-4486-b3f6-c89b07591e5f","Type":"ContainerStarted","Data":"8c44bef7b907f328e50714db255a0c3686b7dd6f75a97e43559cea79e8dd6936"} Feb 27 08:57:18 crc kubenswrapper[4906]: I0227 08:57:18.774274 4906 generic.go:334] "Generic (PLEG): container finished" podID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerID="29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf" exitCode=0 Feb 27 08:57:18 crc kubenswrapper[4906]: I0227 08:57:18.774388 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2x4g" event={"ID":"4ab694fc-89b6-4486-b3f6-c89b07591e5f","Type":"ContainerDied","Data":"29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf"} Feb 27 08:57:20 crc kubenswrapper[4906]: I0227 08:57:20.796788 4906 generic.go:334] "Generic (PLEG): container finished" podID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerID="81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf" exitCode=0 Feb 27 08:57:20 crc kubenswrapper[4906]: I0227 08:57:20.796827 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2x4g" event={"ID":"4ab694fc-89b6-4486-b3f6-c89b07591e5f","Type":"ContainerDied","Data":"81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf"} Feb 27 08:57:21 crc kubenswrapper[4906]: I0227 08:57:21.813115 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2x4g" event={"ID":"4ab694fc-89b6-4486-b3f6-c89b07591e5f","Type":"ContainerStarted","Data":"4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904"} Feb 27 08:57:21 crc kubenswrapper[4906]: I0227 08:57:21.855755 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-b2x4g" podStartSLOduration=3.41478112 podStartE2EDuration="5.85572346s" podCreationTimestamp="2026-02-27 08:57:16 +0000 UTC" firstStartedPulling="2026-02-27 08:57:18.777213336 +0000 UTC m=+1737.171614946" lastFinishedPulling="2026-02-27 08:57:21.218155676 +0000 UTC m=+1739.612557286" observedRunningTime="2026-02-27 08:57:21.835706404 +0000 UTC m=+1740.230108014" watchObservedRunningTime="2026-02-27 08:57:21.85572346 +0000 UTC m=+1740.250125090" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.327213 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vgstt"] Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.330647 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.339304 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgstt"] Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.401898 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-catalog-content\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.402133 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gllf\" (UniqueName: \"kubernetes.io/projected/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-kube-api-access-2gllf\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.402211 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-utilities\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.519275 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-catalog-content\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.529302 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gllf\" (UniqueName: \"kubernetes.io/projected/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-kube-api-access-2gllf\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.529907 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-utilities\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.522948 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-catalog-content\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.531017 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-utilities\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.568594 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2gllf\" (UniqueName: \"kubernetes.io/projected/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-kube-api-access-2gllf\") pod \"certified-operators-vgstt\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:23 crc kubenswrapper[4906]: I0227 08:57:23.665436 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:24 crc kubenswrapper[4906]: W0227 08:57:24.210872 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode30f3160_86cb_49d9_ac7b_46c6ad8a7195.slice/crio-0d419e1f55ef49d0525eb990cc62e203e707920b14a6f38fc803d600d09aa803 WatchSource:0}: Error finding container 0d419e1f55ef49d0525eb990cc62e203e707920b14a6f38fc803d600d09aa803: Status 404 returned error can't find the container with id 0d419e1f55ef49d0525eb990cc62e203e707920b14a6f38fc803d600d09aa803 Feb 27 08:57:24 crc kubenswrapper[4906]: I0227 08:57:24.212053 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vgstt"] Feb 27 08:57:24 crc kubenswrapper[4906]: I0227 08:57:24.844645 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:57:24 crc kubenswrapper[4906]: I0227 08:57:24.845182 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:57:24 crc kubenswrapper[4906]: I0227 08:57:24.862952 4906 generic.go:334] "Generic (PLEG): container finished" podID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerID="3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad" exitCode=0 Feb 27 08:57:24 crc kubenswrapper[4906]: I0227 08:57:24.863008 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgstt" event={"ID":"e30f3160-86cb-49d9-ac7b-46c6ad8a7195","Type":"ContainerDied","Data":"3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad"} Feb 27 08:57:24 crc kubenswrapper[4906]: I0227 08:57:24.863041 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgstt" event={"ID":"e30f3160-86cb-49d9-ac7b-46c6ad8a7195","Type":"ContainerStarted","Data":"0d419e1f55ef49d0525eb990cc62e203e707920b14a6f38fc803d600d09aa803"} Feb 27 08:57:26 crc kubenswrapper[4906]: I0227 08:57:26.884464 4906 generic.go:334] "Generic (PLEG): container finished" podID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerID="b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77" exitCode=0 Feb 27 08:57:26 crc kubenswrapper[4906]: I0227 08:57:26.884672 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgstt" event={"ID":"e30f3160-86cb-49d9-ac7b-46c6ad8a7195","Type":"ContainerDied","Data":"b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77"} Feb 27 08:57:27 crc kubenswrapper[4906]: I0227 08:57:27.074121 4906 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:27 crc kubenswrapper[4906]: I0227 08:57:27.074599 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:27 crc kubenswrapper[4906]: I0227 08:57:27.130709 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:27 crc kubenswrapper[4906]: I0227 08:57:27.899024 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgstt" event={"ID":"e30f3160-86cb-49d9-ac7b-46c6ad8a7195","Type":"ContainerStarted","Data":"cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4"} Feb 27 08:57:27 crc kubenswrapper[4906]: I0227 08:57:27.923100 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vgstt" podStartSLOduration=2.417770694 podStartE2EDuration="4.923076183s" podCreationTimestamp="2026-02-27 08:57:23 +0000 UTC" firstStartedPulling="2026-02-27 08:57:24.864989525 +0000 UTC m=+1743.259391135" lastFinishedPulling="2026-02-27 08:57:27.370295014 +0000 UTC m=+1745.764696624" observedRunningTime="2026-02-27 08:57:27.915719959 +0000 UTC m=+1746.310121569" watchObservedRunningTime="2026-02-27 08:57:27.923076183 +0000 UTC m=+1746.317477793" Feb 27 08:57:27 crc kubenswrapper[4906]: I0227 08:57:27.962658 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:28 crc kubenswrapper[4906]: I0227 08:57:28.450910 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2x4g"] Feb 27 08:57:29 crc kubenswrapper[4906]: I0227 08:57:29.918218 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-b2x4g" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="registry-server" containerID="cri-o://4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904" gracePeriod=2 Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.427271 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.600602 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-utilities\") pod \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.600909 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwcwz\" (UniqueName: \"kubernetes.io/projected/4ab694fc-89b6-4486-b3f6-c89b07591e5f-kube-api-access-zwcwz\") pod \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.601017 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-catalog-content\") pod \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\" (UID: \"4ab694fc-89b6-4486-b3f6-c89b07591e5f\") " Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.602733 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-utilities" (OuterVolumeSpecName: "utilities") pod "4ab694fc-89b6-4486-b3f6-c89b07591e5f" (UID: "4ab694fc-89b6-4486-b3f6-c89b07591e5f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.614187 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ab694fc-89b6-4486-b3f6-c89b07591e5f-kube-api-access-zwcwz" (OuterVolumeSpecName: "kube-api-access-zwcwz") pod "4ab694fc-89b6-4486-b3f6-c89b07591e5f" (UID: "4ab694fc-89b6-4486-b3f6-c89b07591e5f"). InnerVolumeSpecName "kube-api-access-zwcwz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.659450 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ab694fc-89b6-4486-b3f6-c89b07591e5f" (UID: "4ab694fc-89b6-4486-b3f6-c89b07591e5f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.704641 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwcwz\" (UniqueName: \"kubernetes.io/projected/4ab694fc-89b6-4486-b3f6-c89b07591e5f-kube-api-access-zwcwz\") on node \"crc\" DevicePath \"\"" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.704918 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.704942 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab694fc-89b6-4486-b3f6-c89b07591e5f-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.930131 4906 generic.go:334] "Generic (PLEG): container finished" podID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerID="4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904" exitCode=0 Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.930196 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2x4g" event={"ID":"4ab694fc-89b6-4486-b3f6-c89b07591e5f","Type":"ContainerDied","Data":"4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904"} Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.930234 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-b2x4g" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.930261 4906 scope.go:117] "RemoveContainer" containerID="4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904" Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.930240 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-b2x4g" event={"ID":"4ab694fc-89b6-4486-b3f6-c89b07591e5f","Type":"ContainerDied","Data":"8c44bef7b907f328e50714db255a0c3686b7dd6f75a97e43559cea79e8dd6936"} Feb 27 08:57:30 crc kubenswrapper[4906]: I0227 08:57:30.952955 4906 scope.go:117] "RemoveContainer" containerID="81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf" Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.010358 4906 scope.go:117] "RemoveContainer" containerID="29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf" Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.015525 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2x4g"] Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.035183 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-b2x4g"] Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.037459 4906 scope.go:117] "RemoveContainer" containerID="4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904" Feb 27 08:57:31 crc kubenswrapper[4906]: E0227 08:57:31.038152 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904\": container with ID starting with 4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904 not found: ID does not exist" containerID="4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904" Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.038202 4906 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904"} err="failed to get container status \"4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904\": rpc error: code = NotFound desc = could not find container \"4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904\": container with ID starting with 4faf72f5c7b8c188d339ddcc7a535b6a08a8b8a8da8b366cecbeb3fb07b18904 not found: ID does not exist" Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.038228 4906 scope.go:117] "RemoveContainer" containerID="81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf" Feb 27 08:57:31 crc kubenswrapper[4906]: E0227 08:57:31.038713 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf\": container with ID starting with 81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf not found: ID does not exist" containerID="81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf" Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.038766 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf"} err="failed to get container status \"81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf\": rpc error: code = NotFound desc = could not find container \"81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf\": container with ID starting with 81960907efec668887e3af7a8832f27358c03aabdb385c3df9ee9798705f9bcf not found: ID does not exist" Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.038801 4906 scope.go:117] "RemoveContainer" containerID="29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf" Feb 27 08:57:31 crc kubenswrapper[4906]: E0227 08:57:31.039245 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf\": container with ID starting with 29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf not found: ID does not exist" containerID="29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf" Feb 27 08:57:31 crc kubenswrapper[4906]: I0227 08:57:31.039316 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf"} err="failed to get container status \"29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf\": rpc error: code = NotFound desc = could not find container \"29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf\": container with ID starting with 29dc051255d060a5f496479ca274a9abbe88adb04953faecce3fdb3c4056adcf not found: ID does not exist" Feb 27 08:57:32 crc kubenswrapper[4906]: I0227 08:57:32.571072 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" path="/var/lib/kubelet/pods/4ab694fc-89b6-4486-b3f6-c89b07591e5f/volumes" Feb 27 08:57:33 crc kubenswrapper[4906]: I0227 08:57:33.666357 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:33 crc kubenswrapper[4906]: I0227 08:57:33.666423 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:33 crc kubenswrapper[4906]: I0227 08:57:33.723519 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:34 crc kubenswrapper[4906]: I0227 08:57:34.011723 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:34 crc kubenswrapper[4906]: I0227 08:57:34.654904 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgstt"] Feb 27 08:57:35 crc kubenswrapper[4906]: I0227 08:57:35.990358 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vgstt" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="registry-server" containerID="cri-o://cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4" gracePeriod=2 Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.534869 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.642902 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gllf\" (UniqueName: \"kubernetes.io/projected/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-kube-api-access-2gllf\") pod \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.643529 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-utilities\") pod \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.643572 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-catalog-content\") pod \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\" (UID: \"e30f3160-86cb-49d9-ac7b-46c6ad8a7195\") " Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.644554 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-utilities" (OuterVolumeSpecName: "utilities") pod "e30f3160-86cb-49d9-ac7b-46c6ad8a7195" (UID: "e30f3160-86cb-49d9-ac7b-46c6ad8a7195"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.652032 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-kube-api-access-2gllf" (OuterVolumeSpecName: "kube-api-access-2gllf") pod "e30f3160-86cb-49d9-ac7b-46c6ad8a7195" (UID: "e30f3160-86cb-49d9-ac7b-46c6ad8a7195"). InnerVolumeSpecName "kube-api-access-2gllf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.712608 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e30f3160-86cb-49d9-ac7b-46c6ad8a7195" (UID: "e30f3160-86cb-49d9-ac7b-46c6ad8a7195"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.747072 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.747194 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 08:57:36 crc kubenswrapper[4906]: I0227 08:57:36.747255 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gllf\" (UniqueName: \"kubernetes.io/projected/e30f3160-86cb-49d9-ac7b-46c6ad8a7195-kube-api-access-2gllf\") on node \"crc\" DevicePath \"\"" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.017041 4906 generic.go:334] "Generic (PLEG): container finished" podID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerID="cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4" exitCode=0 Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.017098 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgstt" event={"ID":"e30f3160-86cb-49d9-ac7b-46c6ad8a7195","Type":"ContainerDied","Data":"cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4"} Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.017138 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vgstt" event={"ID":"e30f3160-86cb-49d9-ac7b-46c6ad8a7195","Type":"ContainerDied","Data":"0d419e1f55ef49d0525eb990cc62e203e707920b14a6f38fc803d600d09aa803"} Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.017134 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vgstt" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.018248 4906 scope.go:117] "RemoveContainer" containerID="cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.060546 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vgstt"] Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.062131 4906 scope.go:117] "RemoveContainer" containerID="b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.069273 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vgstt"] Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.090931 4906 scope.go:117] "RemoveContainer" containerID="3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.138148 4906 scope.go:117] "RemoveContainer" containerID="cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4" Feb 27 08:57:37 crc kubenswrapper[4906]: E0227 08:57:37.138850 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4\": container with ID starting with cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4 not found: ID does not exist" containerID="cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.138908 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4"} err="failed to get container status \"cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4\": rpc error: code = NotFound desc = could not find container \"cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4\": container with ID starting with cd1e8f82e7d14b7f11f50b678b4c01d816c99b05b56434cb868e1f56b3d9c3e4 not found: ID does not exist" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.138941 4906 scope.go:117] "RemoveContainer" containerID="b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77" Feb 27 08:57:37 crc kubenswrapper[4906]: E0227 08:57:37.139485 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77\": container with ID starting with b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77 not found: ID does not exist" containerID="b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.139535 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77"} err="failed to get container status \"b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77\": rpc error: code = NotFound desc = could not find container \"b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77\": container with ID starting with b36df23b5d33067d0a522c16523449673f93a60b8e747ff22296698ece9dbc77 not found: ID does not exist" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.139574 4906 scope.go:117] "RemoveContainer" 
containerID="3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad" Feb 27 08:57:37 crc kubenswrapper[4906]: E0227 08:57:37.140108 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad\": container with ID starting with 3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad not found: ID does not exist" containerID="3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad" Feb 27 08:57:37 crc kubenswrapper[4906]: I0227 08:57:37.140159 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad"} err="failed to get container status \"3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad\": rpc error: code = NotFound desc = could not find container \"3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad\": container with ID starting with 3fce03c5812dac3ff3d9e8b809bb26686639e75348bca2a274457964beb351ad not found: ID does not exist" Feb 27 08:57:38 crc kubenswrapper[4906]: I0227 08:57:38.571512 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" path="/var/lib/kubelet/pods/e30f3160-86cb-49d9-ac7b-46c6ad8a7195/volumes" Feb 27 08:57:54 crc kubenswrapper[4906]: I0227 08:57:54.844829 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 08:57:54 crc kubenswrapper[4906]: I0227 08:57:54.845766 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 08:57:54 crc kubenswrapper[4906]: I0227 08:57:54.845833 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 08:57:54 crc kubenswrapper[4906]: I0227 08:57:54.846850 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 08:57:54 crc kubenswrapper[4906]: I0227 08:57:54.846934 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" gracePeriod=600 Feb 27 08:57:54 crc kubenswrapper[4906]: E0227 08:57:54.992158 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:57:55 crc kubenswrapper[4906]: I0227 08:57:55.261302 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" exitCode=0 Feb 27 08:57:55 crc kubenswrapper[4906]: I0227 08:57:55.261358 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13"} Feb 27 08:57:55 crc kubenswrapper[4906]: I0227 08:57:55.261402 4906 scope.go:117] "RemoveContainer" containerID="400ed111d9890552c3d11872a2da1327403afcb99497b3740757d1aed8ae0dbb" Feb 27 08:57:55 crc kubenswrapper[4906]: I0227 08:57:55.262548 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:57:55 crc kubenswrapper[4906]: E0227 08:57:55.263098 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:57:59 crc kubenswrapper[4906]: I0227 08:57:59.957354 4906 scope.go:117] "RemoveContainer" containerID="d1136f91f3db289760b67bf78418a2809b37b6cc1c92b62bf1341729971407ed" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.154764 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536378-h48j4"] Feb 27 08:58:00 crc kubenswrapper[4906]: E0227 08:58:00.155593 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="registry-server" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.155620 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="registry-server" Feb 27 08:58:00 crc kubenswrapper[4906]: E0227 08:58:00.155646 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="extract-utilities" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.155657 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="extract-utilities" Feb 27 08:58:00 crc kubenswrapper[4906]: E0227 08:58:00.155671 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="extract-content" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.155680 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="extract-content" Feb 27 08:58:00 crc kubenswrapper[4906]: E0227 08:58:00.155697 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="registry-server" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.155707 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="registry-server" Feb 27 08:58:00 crc kubenswrapper[4906]: E0227 
08:58:00.155724 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="extract-content" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.155735 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="extract-content" Feb 27 08:58:00 crc kubenswrapper[4906]: E0227 08:58:00.155750 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="extract-utilities" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.155759 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="extract-utilities" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.156136 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e30f3160-86cb-49d9-ac7b-46c6ad8a7195" containerName="registry-server" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.156184 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ab694fc-89b6-4486-b3f6-c89b07591e5f" containerName="registry-server" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.157408 4906 scope.go:117] "RemoveContainer" containerID="ede6b37c237a95667008a06057fd933008e9e665abcee2a3c2a28ce6a6594391" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.157557 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536378-h48j4" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.161450 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.161659 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.161788 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.172597 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536378-h48j4"] Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.246105 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwkbf\" (UniqueName: \"kubernetes.io/projected/ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854-kube-api-access-dwkbf\") pod \"auto-csr-approver-29536378-h48j4\" (UID: \"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854\") " pod="openshift-infra/auto-csr-approver-29536378-h48j4" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.349147 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwkbf\" (UniqueName: \"kubernetes.io/projected/ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854-kube-api-access-dwkbf\") pod \"auto-csr-approver-29536378-h48j4\" (UID: \"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854\") " pod="openshift-infra/auto-csr-approver-29536378-h48j4" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.380507 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwkbf\" (UniqueName: \"kubernetes.io/projected/ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854-kube-api-access-dwkbf\") pod \"auto-csr-approver-29536378-h48j4\" (UID: \"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854\") " pod="openshift-infra/auto-csr-approver-29536378-h48j4" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 
08:58:00.483099 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536378-h48j4" Feb 27 08:58:00 crc kubenswrapper[4906]: I0227 08:58:00.991317 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536378-h48j4"] Feb 27 08:58:01 crc kubenswrapper[4906]: I0227 08:58:00.999867 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 08:58:01 crc kubenswrapper[4906]: I0227 08:58:01.324684 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536378-h48j4" event={"ID":"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854","Type":"ContainerStarted","Data":"be2b60351740a1df530bd5594fe3332d1d5bd079e5727782548cb9bc4fdfc1af"} Feb 27 08:58:03 crc kubenswrapper[4906]: I0227 08:58:03.357536 4906 generic.go:334] "Generic (PLEG): container finished" podID="ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854" containerID="93a45b4f4f18ed9a95dc1f8381529a21fe75715a5698a5d841105c824970914d" exitCode=0 Feb 27 08:58:03 crc kubenswrapper[4906]: I0227 08:58:03.357772 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536378-h48j4" event={"ID":"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854","Type":"ContainerDied","Data":"93a45b4f4f18ed9a95dc1f8381529a21fe75715a5698a5d841105c824970914d"} Feb 27 08:58:04 crc kubenswrapper[4906]: I0227 08:58:04.724445 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536378-h48j4" Feb 27 08:58:04 crc kubenswrapper[4906]: I0227 08:58:04.857974 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwkbf\" (UniqueName: \"kubernetes.io/projected/ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854-kube-api-access-dwkbf\") pod \"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854\" (UID: \"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854\") " Feb 27 08:58:04 crc kubenswrapper[4906]: I0227 08:58:04.867351 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854-kube-api-access-dwkbf" (OuterVolumeSpecName: "kube-api-access-dwkbf") pod "ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854" (UID: "ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854"). InnerVolumeSpecName "kube-api-access-dwkbf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:58:04 crc kubenswrapper[4906]: I0227 08:58:04.962070 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwkbf\" (UniqueName: \"kubernetes.io/projected/ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854-kube-api-access-dwkbf\") on node \"crc\" DevicePath \"\"" Feb 27 08:58:05 crc kubenswrapper[4906]: I0227 08:58:05.385027 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536378-h48j4" event={"ID":"ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854","Type":"ContainerDied","Data":"be2b60351740a1df530bd5594fe3332d1d5bd079e5727782548cb9bc4fdfc1af"} Feb 27 08:58:05 crc kubenswrapper[4906]: I0227 08:58:05.385083 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be2b60351740a1df530bd5594fe3332d1d5bd079e5727782548cb9bc4fdfc1af" Feb 27 08:58:05 crc kubenswrapper[4906]: I0227 08:58:05.385132 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536378-h48j4" Feb 27 08:58:05 crc kubenswrapper[4906]: I0227 08:58:05.832391 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536372-clhfq"] Feb 27 08:58:05 crc kubenswrapper[4906]: I0227 08:58:05.852583 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536372-clhfq"] Feb 27 08:58:06 crc kubenswrapper[4906]: I0227 08:58:06.564247 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ba4b224-b847-4ce9-a4f4-52fa5001cb2d" path="/var/lib/kubelet/pods/2ba4b224-b847-4ce9-a4f4-52fa5001cb2d/volumes" Feb 27 08:58:08 crc kubenswrapper[4906]: I0227 08:58:08.553947 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:58:08 crc kubenswrapper[4906]: E0227 08:58:08.554791 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:58:19 crc kubenswrapper[4906]: I0227 08:58:19.552553 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:58:19 crc kubenswrapper[4906]: E0227 08:58:19.553662 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:58:31 crc kubenswrapper[4906]: I0227 08:58:31.552441 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:58:31 crc kubenswrapper[4906]: E0227 08:58:31.553297 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:58:43 crc kubenswrapper[4906]: I0227 08:58:43.552466 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:58:43 crc kubenswrapper[4906]: E0227 08:58:43.553264 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:58:57 crc kubenswrapper[4906]: I0227 08:58:57.553397 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 
08:58:57 crc kubenswrapper[4906]: E0227 08:58:57.555008 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:59:00 crc kubenswrapper[4906]: I0227 08:59:00.266610 4906 scope.go:117] "RemoveContainer" containerID="48c99ba5361f49134c92955907415c63befe9dd15596f9bddc355e3a580e494a" Feb 27 08:59:00 crc kubenswrapper[4906]: I0227 08:59:00.308696 4906 scope.go:117] "RemoveContainer" containerID="3b839e8657d267f2171c4b62f23b5996cb039719c6933b1c1a508b488bff636f" Feb 27 08:59:00 crc kubenswrapper[4906]: I0227 08:59:00.359032 4906 scope.go:117] "RemoveContainer" containerID="c93e04aac3f49823e50ad872c303aa87a369dd8d13f24bbf659b849a049e6621" Feb 27 08:59:00 crc kubenswrapper[4906]: I0227 08:59:00.394557 4906 scope.go:117] "RemoveContainer" containerID="7a766cf0066b091d80d6a0a7c14085467d9787c263d38e1ac76b3b3ceb8e2019" Feb 27 08:59:00 crc kubenswrapper[4906]: I0227 08:59:00.420145 4906 scope.go:117] "RemoveContainer" containerID="94ffffe4681f59ad3db82f7bb529682fde43af4743b824c771a66455ec7dc73d" Feb 27 08:59:12 crc kubenswrapper[4906]: I0227 08:59:12.561291 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:59:12 crc kubenswrapper[4906]: E0227 08:59:12.562376 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:59:13 crc kubenswrapper[4906]: I0227 08:59:13.181624 4906 generic.go:334] "Generic (PLEG): container finished" podID="42f73c0e-3d0e-4ba2-aa05-c1547471b938" containerID="174381d029c6b30ef5ae5e2b1e53eeda5c4df13f90b23e024cbaee8a039f60de" exitCode=0 Feb 27 08:59:13 crc kubenswrapper[4906]: I0227 08:59:13.181851 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" event={"ID":"42f73c0e-3d0e-4ba2-aa05-c1547471b938","Type":"ContainerDied","Data":"174381d029c6b30ef5ae5e2b1e53eeda5c4df13f90b23e024cbaee8a039f60de"} Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.656824 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.695631 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j6dm\" (UniqueName: \"kubernetes.io/projected/42f73c0e-3d0e-4ba2-aa05-c1547471b938-kube-api-access-8j6dm\") pod \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.695725 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-bootstrap-combined-ca-bundle\") pod \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.695814 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-ssh-key-openstack-edpm-ipam\") pod \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.696000 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-inventory\") pod \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\" (UID: \"42f73c0e-3d0e-4ba2-aa05-c1547471b938\") " Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.703971 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "42f73c0e-3d0e-4ba2-aa05-c1547471b938" (UID: "42f73c0e-3d0e-4ba2-aa05-c1547471b938"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.704069 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42f73c0e-3d0e-4ba2-aa05-c1547471b938-kube-api-access-8j6dm" (OuterVolumeSpecName: "kube-api-access-8j6dm") pod "42f73c0e-3d0e-4ba2-aa05-c1547471b938" (UID: "42f73c0e-3d0e-4ba2-aa05-c1547471b938"). InnerVolumeSpecName "kube-api-access-8j6dm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.728547 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-inventory" (OuterVolumeSpecName: "inventory") pod "42f73c0e-3d0e-4ba2-aa05-c1547471b938" (UID: "42f73c0e-3d0e-4ba2-aa05-c1547471b938"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.733989 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "42f73c0e-3d0e-4ba2-aa05-c1547471b938" (UID: "42f73c0e-3d0e-4ba2-aa05-c1547471b938"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.799773 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.799814 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j6dm\" (UniqueName: \"kubernetes.io/projected/42f73c0e-3d0e-4ba2-aa05-c1547471b938-kube-api-access-8j6dm\") on node \"crc\" DevicePath \"\"" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.799826 4906 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 08:59:14 crc kubenswrapper[4906]: I0227 08:59:14.799839 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/42f73c0e-3d0e-4ba2-aa05-c1547471b938-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.204520 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" event={"ID":"42f73c0e-3d0e-4ba2-aa05-c1547471b938","Type":"ContainerDied","Data":"5251561837ba0dedc79889db27bec5e65decffd8fa14cb20a4b8032cb15ccbc0"} Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.204581 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5251561837ba0dedc79889db27bec5e65decffd8fa14cb20a4b8032cb15ccbc0" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.204622 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.320424 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj"] Feb 27 08:59:15 crc kubenswrapper[4906]: E0227 08:59:15.321144 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854" containerName="oc" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.321173 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854" containerName="oc" Feb 27 08:59:15 crc kubenswrapper[4906]: E0227 08:59:15.321242 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f73c0e-3d0e-4ba2-aa05-c1547471b938" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.321255 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f73c0e-3d0e-4ba2-aa05-c1547471b938" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.321523 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="42f73c0e-3d0e-4ba2-aa05-c1547471b938" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.321558 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854" containerName="oc" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.322444 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.326989 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.327122 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.327277 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.327474 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.334581 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj"] Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.414812 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.415640 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.415676 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbpv4\" (UniqueName: \"kubernetes.io/projected/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-kube-api-access-kbpv4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.519089 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.519238 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.519268 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbpv4\" (UniqueName: 
\"kubernetes.io/projected/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-kube-api-access-kbpv4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.526659 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.526678 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-ssh-key-openstack-edpm-ipam\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.541996 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbpv4\" (UniqueName: \"kubernetes.io/projected/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-kube-api-access-kbpv4\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:15 crc kubenswrapper[4906]: I0227 08:59:15.693760 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 08:59:16 crc kubenswrapper[4906]: I0227 08:59:16.261405 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj"] Feb 27 08:59:17 crc kubenswrapper[4906]: I0227 08:59:17.226717 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" event={"ID":"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a","Type":"ContainerStarted","Data":"ae3de81c4b5ad512c580a52621492a7c4c1be47a26ea668a06e289d545278f74"} Feb 27 08:59:17 crc kubenswrapper[4906]: I0227 08:59:17.227326 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" event={"ID":"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a","Type":"ContainerStarted","Data":"cc04f7dde728a1504b0e87f30e903401646923c48107473cb42b7833c0da1cc2"} Feb 27 08:59:17 crc kubenswrapper[4906]: I0227 08:59:17.251257 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" podStartSLOduration=1.754724778 podStartE2EDuration="2.251230468s" podCreationTimestamp="2026-02-27 08:59:15 +0000 UTC" firstStartedPulling="2026-02-27 08:59:16.266539366 +0000 UTC m=+1854.660940976" lastFinishedPulling="2026-02-27 08:59:16.763045056 +0000 UTC m=+1855.157446666" observedRunningTime="2026-02-27 08:59:17.24713135 +0000 UTC m=+1855.641532980" watchObservedRunningTime="2026-02-27 08:59:17.251230468 +0000 UTC m=+1855.645632078" Feb 27 08:59:24 crc kubenswrapper[4906]: I0227 08:59:24.045671 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-zgnvf"] Feb 27 08:59:24 crc kubenswrapper[4906]: 
I0227 08:59:24.063487 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-zgnvf"] Feb 27 08:59:24 crc kubenswrapper[4906]: I0227 08:59:24.567429 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b44567c9-a11c-4760-8ee3-43d4c96fa180" path="/var/lib/kubelet/pods/b44567c9-a11c-4760-8ee3-43d4c96fa180/volumes" Feb 27 08:59:26 crc kubenswrapper[4906]: I0227 08:59:26.552345 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:59:26 crc kubenswrapper[4906]: E0227 08:59:26.552990 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:59:40 crc kubenswrapper[4906]: I0227 08:59:40.552654 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:59:40 crc kubenswrapper[4906]: E0227 08:59:40.553374 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.042874 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-2a5c-account-create-update-vfl7s"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.054795 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bb99-account-create-update-ppmpk"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.066360 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-bx2nz"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.078965 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-2a5c-account-create-update-vfl7s"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.088547 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-1cea-account-create-update-s8q8n"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.099796 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-w76h9"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.108427 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-bx2nz"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.118063 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bb99-account-create-update-ppmpk"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.127786 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-w76h9"] Feb 27 08:59:41 crc kubenswrapper[4906]: I0227 08:59:41.139398 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-1cea-account-create-update-s8q8n"] Feb 27 08:59:42 crc kubenswrapper[4906]: I0227 08:59:42.568583 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="0eca1824-e2a4-4bac-bc2d-8e2035dae7c7" path="/var/lib/kubelet/pods/0eca1824-e2a4-4bac-bc2d-8e2035dae7c7/volumes" Feb 27 08:59:42 crc kubenswrapper[4906]: I0227 08:59:42.569919 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="26a00786-e81f-488f-88fe-ffc98c7f63e2" path="/var/lib/kubelet/pods/26a00786-e81f-488f-88fe-ffc98c7f63e2/volumes" Feb 27 08:59:42 crc kubenswrapper[4906]: I0227 08:59:42.571843 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4d108c06-2f21-4a05-87ab-caccb04415b5" path="/var/lib/kubelet/pods/4d108c06-2f21-4a05-87ab-caccb04415b5/volumes" Feb 27 08:59:42 crc kubenswrapper[4906]: I0227 08:59:42.572695 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69ae1e99-afff-4a95-bc00-d1891e12976d" path="/var/lib/kubelet/pods/69ae1e99-afff-4a95-bc00-d1891e12976d/volumes" Feb 27 08:59:42 crc kubenswrapper[4906]: I0227 08:59:42.574654 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1fe6339-ed29-4e33-a161-12b6c001a7e3" path="/var/lib/kubelet/pods/c1fe6339-ed29-4e33-a161-12b6c001a7e3/volumes" Feb 27 08:59:49 crc kubenswrapper[4906]: I0227 08:59:49.054531 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-lp8rr"] Feb 27 08:59:49 crc kubenswrapper[4906]: I0227 08:59:49.074031 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-lp8rr"] Feb 27 08:59:50 crc kubenswrapper[4906]: I0227 08:59:50.047415 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-7zdvv"] Feb 27 08:59:50 crc kubenswrapper[4906]: I0227 08:59:50.060542 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-7zdvv"] Feb 27 08:59:50 crc kubenswrapper[4906]: I0227 08:59:50.566651 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81c574b4-61cc-4790-93c5-a08e34058cbe" path="/var/lib/kubelet/pods/81c574b4-61cc-4790-93c5-a08e34058cbe/volumes" Feb 27 08:59:50 crc kubenswrapper[4906]: I0227 08:59:50.567657 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8fb662f-4673-4364-ba6b-9ff02fa23b4e" path="/var/lib/kubelet/pods/c8fb662f-4673-4364-ba6b-9ff02fa23b4e/volumes" Feb 27 08:59:51 crc kubenswrapper[4906]: I0227 08:59:51.054994 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-7nq6g"] Feb 27 08:59:51 crc kubenswrapper[4906]: I0227 08:59:51.063364 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0045-account-create-update-wqm68"] Feb 27 08:59:51 crc kubenswrapper[4906]: I0227 08:59:51.072473 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-7nq6g"] Feb 27 08:59:51 crc kubenswrapper[4906]: I0227 08:59:51.089394 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0045-account-create-update-wqm68"] Feb 27 08:59:52 crc kubenswrapper[4906]: I0227 08:59:52.568618 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17434f9b-c5b6-4828-a71e-d8ebefaf9aba" path="/var/lib/kubelet/pods/17434f9b-c5b6-4828-a71e-d8ebefaf9aba/volumes" Feb 27 08:59:52 crc kubenswrapper[4906]: I0227 08:59:52.569633 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c0d44ec-1d13-43a5-90b9-14282b765fdc" path="/var/lib/kubelet/pods/1c0d44ec-1d13-43a5-90b9-14282b765fdc/volumes" Feb 27 08:59:55 crc kubenswrapper[4906]: I0227 08:59:55.552440 4906 scope.go:117] "RemoveContainer" 
containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 08:59:55 crc kubenswrapper[4906]: E0227 08:59:55.553165 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 08:59:58 crc kubenswrapper[4906]: I0227 08:59:58.042091 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-plwsw"] Feb 27 08:59:58 crc kubenswrapper[4906]: I0227 08:59:58.060176 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-plwsw"] Feb 27 08:59:58 crc kubenswrapper[4906]: I0227 08:59:58.563211 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c8ae872-f33a-4027-b4f4-ed0baf60a19f" path="/var/lib/kubelet/pods/5c8ae872-f33a-4027-b4f4-ed0baf60a19f/volumes" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.152521 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536380-lvt5k"] Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.155693 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536380-lvt5k" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.157644 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.159784 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.160706 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.168440 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht"] Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.170424 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.172561 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.172741 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.178991 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536380-lvt5k"] Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.189544 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht"] Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.204745 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prkzk\" (UniqueName: \"kubernetes.io/projected/234a230d-4b25-4208-814b-9c883c68f128-kube-api-access-prkzk\") pod \"auto-csr-approver-29536380-lvt5k\" (UID: \"234a230d-4b25-4208-814b-9c883c68f128\") " pod="openshift-infra/auto-csr-approver-29536380-lvt5k" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.307329 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prkzk\" (UniqueName: \"kubernetes.io/projected/234a230d-4b25-4208-814b-9c883c68f128-kube-api-access-prkzk\") pod \"auto-csr-approver-29536380-lvt5k\" (UID: \"234a230d-4b25-4208-814b-9c883c68f128\") " pod="openshift-infra/auto-csr-approver-29536380-lvt5k" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.308019 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs66g\" (UniqueName: \"kubernetes.io/projected/ad15d159-a96a-4408-98e7-bc7b673ae905-kube-api-access-vs66g\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.308087 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad15d159-a96a-4408-98e7-bc7b673ae905-config-volume\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.308206 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad15d159-a96a-4408-98e7-bc7b673ae905-secret-volume\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.410729 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs66g\" (UniqueName: \"kubernetes.io/projected/ad15d159-a96a-4408-98e7-bc7b673ae905-kube-api-access-vs66g\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.410821 
4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad15d159-a96a-4408-98e7-bc7b673ae905-config-volume\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.410954 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad15d159-a96a-4408-98e7-bc7b673ae905-secret-volume\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.413964 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad15d159-a96a-4408-98e7-bc7b673ae905-config-volume\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.420683 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prkzk\" (UniqueName: \"kubernetes.io/projected/234a230d-4b25-4208-814b-9c883c68f128-kube-api-access-prkzk\") pod \"auto-csr-approver-29536380-lvt5k\" (UID: \"234a230d-4b25-4208-814b-9c883c68f128\") " pod="openshift-infra/auto-csr-approver-29536380-lvt5k" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.420731 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad15d159-a96a-4408-98e7-bc7b673ae905-secret-volume\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.438180 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs66g\" (UniqueName: \"kubernetes.io/projected/ad15d159-a96a-4408-98e7-bc7b673ae905-kube-api-access-vs66g\") pod \"collect-profiles-29536380-7hfht\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.513392 4906 scope.go:117] "RemoveContainer" containerID="8d87d3c3796598f91e5eba295fbc2c4f9f24fa68398c63042d8ec5bc4400fbf1" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.714276 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.715432 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536380-lvt5k" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.756013 4906 scope.go:117] "RemoveContainer" containerID="27300d3014c44c93b3e33ddff36683a17ed53775e270bf2a9fa6521738b5f303" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.892336 4906 scope.go:117] "RemoveContainer" containerID="9b35daa687f175c3ecbdd4fc565e08e63d823d2088c43e55069f5fc4657b63d8" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.931821 4906 scope.go:117] "RemoveContainer" containerID="a68840212c3d548fdbe7cf32a761361a560bcb1df3ef9e2967c2580f4347d649" Feb 27 09:00:00 crc kubenswrapper[4906]: I0227 09:00:00.965681 4906 scope.go:117] "RemoveContainer" containerID="169913695a138bd02677d1e20fa011f154144a9f4deab2e47ead527cf5681537" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.010778 4906 scope.go:117] "RemoveContainer" containerID="789a063d212389d5c48795acd28ea9d6e627e9514c7541c5ef82a3f130bc87e6" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.042456 4906 scope.go:117] "RemoveContainer" containerID="11e7587ebabfb6f857126ea2c87788cfa2f8da5ab1ce6d8dcddcd1749aaa076e" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.108092 4906 scope.go:117] "RemoveContainer" containerID="cc88132b664db7e01dd9c0d0a568257692b261fa19a6d9b1d02e77ef161c86b8" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.148663 4906 scope.go:117] "RemoveContainer" containerID="d8339b31fbe813b2b890b166fe48a7cecf5924a9cd5ee98c6afb5d41c3bfce3f" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.177552 4906 scope.go:117] "RemoveContainer" containerID="f09d019ee4bc3645d19f3ec00bdfeeec67de0399d70f87e08cd0a02989271a7b" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.202439 4906 scope.go:117] "RemoveContainer" containerID="036ffbec054a33b6847ce56b5326ba4cecddefe32239438ce69e33d8ce6e3e99" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.249194 4906 scope.go:117] "RemoveContainer" containerID="79cf6d7092e960d31ba6e89407c1cdc3e677efa8fb3204903d642d98d4e61764" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.290666 4906 scope.go:117] "RemoveContainer" containerID="b39c9bfcb5b446991e94a358e7100f9f8bdc72d5b27edcef064189c1c479f1d5" Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.363210 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht"] Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.444685 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536380-lvt5k"] Feb 27 09:00:01 crc kubenswrapper[4906]: W0227 09:00:01.473810 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod234a230d_4b25_4208_814b_9c883c68f128.slice/crio-8047ae3c6c010b310ea68f70a9575f48dcf5ae3fd2027cab7c8f3da9368cee4d WatchSource:0}: Error finding container 8047ae3c6c010b310ea68f70a9575f48dcf5ae3fd2027cab7c8f3da9368cee4d: Status 404 returned error can't find the container with id 8047ae3c6c010b310ea68f70a9575f48dcf5ae3fd2027cab7c8f3da9368cee4d Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.743964 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536380-lvt5k" event={"ID":"234a230d-4b25-4208-814b-9c883c68f128","Type":"ContainerStarted","Data":"8047ae3c6c010b310ea68f70a9575f48dcf5ae3fd2027cab7c8f3da9368cee4d"} Feb 27 09:00:01 crc kubenswrapper[4906]: I0227 09:00:01.745338 4906 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" event={"ID":"ad15d159-a96a-4408-98e7-bc7b673ae905","Type":"ContainerStarted","Data":"7da1e6f5cc87faf83188f451897b3fb2c6441f38d3097a66974d0f23c176401b"} Feb 27 09:00:02 crc kubenswrapper[4906]: I0227 09:00:02.759637 4906 generic.go:334] "Generic (PLEG): container finished" podID="ad15d159-a96a-4408-98e7-bc7b673ae905" containerID="51f0a929fed938952e388b89c0a2e52b3c23fa1e41917bb5191431b5d5ffaf8c" exitCode=0 Feb 27 09:00:02 crc kubenswrapper[4906]: I0227 09:00:02.760072 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" event={"ID":"ad15d159-a96a-4408-98e7-bc7b673ae905","Type":"ContainerDied","Data":"51f0a929fed938952e388b89c0a2e52b3c23fa1e41917bb5191431b5d5ffaf8c"} Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.128463 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.224369 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vs66g\" (UniqueName: \"kubernetes.io/projected/ad15d159-a96a-4408-98e7-bc7b673ae905-kube-api-access-vs66g\") pod \"ad15d159-a96a-4408-98e7-bc7b673ae905\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.224499 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad15d159-a96a-4408-98e7-bc7b673ae905-config-volume\") pod \"ad15d159-a96a-4408-98e7-bc7b673ae905\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.224782 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad15d159-a96a-4408-98e7-bc7b673ae905-secret-volume\") pod \"ad15d159-a96a-4408-98e7-bc7b673ae905\" (UID: \"ad15d159-a96a-4408-98e7-bc7b673ae905\") " Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.225324 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ad15d159-a96a-4408-98e7-bc7b673ae905-config-volume" (OuterVolumeSpecName: "config-volume") pod "ad15d159-a96a-4408-98e7-bc7b673ae905" (UID: "ad15d159-a96a-4408-98e7-bc7b673ae905"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.225550 4906 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ad15d159-a96a-4408-98e7-bc7b673ae905-config-volume\") on node \"crc\" DevicePath \"\"" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.232169 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad15d159-a96a-4408-98e7-bc7b673ae905-kube-api-access-vs66g" (OuterVolumeSpecName: "kube-api-access-vs66g") pod "ad15d159-a96a-4408-98e7-bc7b673ae905" (UID: "ad15d159-a96a-4408-98e7-bc7b673ae905"). InnerVolumeSpecName "kube-api-access-vs66g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.232217 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ad15d159-a96a-4408-98e7-bc7b673ae905-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ad15d159-a96a-4408-98e7-bc7b673ae905" (UID: "ad15d159-a96a-4408-98e7-bc7b673ae905"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.328735 4906 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ad15d159-a96a-4408-98e7-bc7b673ae905-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.328810 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vs66g\" (UniqueName: \"kubernetes.io/projected/ad15d159-a96a-4408-98e7-bc7b673ae905-kube-api-access-vs66g\") on node \"crc\" DevicePath \"\"" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.786789 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" event={"ID":"ad15d159-a96a-4408-98e7-bc7b673ae905","Type":"ContainerDied","Data":"7da1e6f5cc87faf83188f451897b3fb2c6441f38d3097a66974d0f23c176401b"} Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.786859 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7da1e6f5cc87faf83188f451897b3fb2c6441f38d3097a66974d0f23c176401b" Feb 27 09:00:04 crc kubenswrapper[4906]: I0227 09:00:04.787471 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536380-7hfht" Feb 27 09:00:07 crc kubenswrapper[4906]: I0227 09:00:07.043679 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-1668-account-create-update-q4g7s"] Feb 27 09:00:07 crc kubenswrapper[4906]: I0227 09:00:07.055715 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-1668-account-create-update-q4g7s"] Feb 27 09:00:07 crc kubenswrapper[4906]: I0227 09:00:07.551777 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:00:07 crc kubenswrapper[4906]: E0227 09:00:07.552209 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:00:08 crc kubenswrapper[4906]: I0227 09:00:08.035087 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-c732-account-create-update-srkl5"] Feb 27 09:00:08 crc kubenswrapper[4906]: I0227 09:00:08.050120 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-c732-account-create-update-srkl5"] Feb 27 09:00:08 crc kubenswrapper[4906]: I0227 09:00:08.566746 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e0d50eb-c3dc-456b-9966-493e163facdf" path="/var/lib/kubelet/pods/0e0d50eb-c3dc-456b-9966-493e163facdf/volumes" Feb 27 09:00:08 crc kubenswrapper[4906]: I0227 09:00:08.567761 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="cc216e22-3106-40fb-9f13-fddc321b8394" path="/var/lib/kubelet/pods/cc216e22-3106-40fb-9f13-fddc321b8394/volumes" Feb 27 09:00:11 crc kubenswrapper[4906]: I0227 09:00:11.873858 4906 generic.go:334] "Generic (PLEG): container finished" podID="234a230d-4b25-4208-814b-9c883c68f128" containerID="5d02da8cfa5ebbc27ff6e76a1e4c21a95fc5d6799dfe41f360cb1fdc8e0b1387" exitCode=0 Feb 27 09:00:11 crc kubenswrapper[4906]: I0227 09:00:11.873982 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536380-lvt5k" event={"ID":"234a230d-4b25-4208-814b-9c883c68f128","Type":"ContainerDied","Data":"5d02da8cfa5ebbc27ff6e76a1e4c21a95fc5d6799dfe41f360cb1fdc8e0b1387"} Feb 27 09:00:12 crc kubenswrapper[4906]: I0227 09:00:12.051237 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-xhr9h"] Feb 27 09:00:12 crc kubenswrapper[4906]: I0227 09:00:12.066938 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-xhr9h"] Feb 27 09:00:12 crc kubenswrapper[4906]: I0227 09:00:12.572130 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2bebd962-3db8-45c9-9de7-a96a207a8b12" path="/var/lib/kubelet/pods/2bebd962-3db8-45c9-9de7-a96a207a8b12/volumes" Feb 27 09:00:13 crc kubenswrapper[4906]: I0227 09:00:13.272335 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536380-lvt5k" Feb 27 09:00:13 crc kubenswrapper[4906]: I0227 09:00:13.359384 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prkzk\" (UniqueName: \"kubernetes.io/projected/234a230d-4b25-4208-814b-9c883c68f128-kube-api-access-prkzk\") pod \"234a230d-4b25-4208-814b-9c883c68f128\" (UID: \"234a230d-4b25-4208-814b-9c883c68f128\") " Feb 27 09:00:13 crc kubenswrapper[4906]: I0227 09:00:13.368552 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/234a230d-4b25-4208-814b-9c883c68f128-kube-api-access-prkzk" (OuterVolumeSpecName: "kube-api-access-prkzk") pod "234a230d-4b25-4208-814b-9c883c68f128" (UID: "234a230d-4b25-4208-814b-9c883c68f128"). InnerVolumeSpecName "kube-api-access-prkzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:00:13 crc kubenswrapper[4906]: I0227 09:00:13.461872 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prkzk\" (UniqueName: \"kubernetes.io/projected/234a230d-4b25-4208-814b-9c883c68f128-kube-api-access-prkzk\") on node \"crc\" DevicePath \"\"" Feb 27 09:00:13 crc kubenswrapper[4906]: I0227 09:00:13.903749 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536380-lvt5k" event={"ID":"234a230d-4b25-4208-814b-9c883c68f128","Type":"ContainerDied","Data":"8047ae3c6c010b310ea68f70a9575f48dcf5ae3fd2027cab7c8f3da9368cee4d"} Feb 27 09:00:13 crc kubenswrapper[4906]: I0227 09:00:13.903816 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8047ae3c6c010b310ea68f70a9575f48dcf5ae3fd2027cab7c8f3da9368cee4d" Feb 27 09:00:13 crc kubenswrapper[4906]: I0227 09:00:13.903825 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536380-lvt5k" Feb 27 09:00:14 crc kubenswrapper[4906]: I0227 09:00:14.353960 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536374-vk6qp"] Feb 27 09:00:14 crc kubenswrapper[4906]: I0227 09:00:14.367527 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536374-vk6qp"] Feb 27 09:00:14 crc kubenswrapper[4906]: I0227 09:00:14.564683 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d28859e-464e-4266-89aa-2318bc051b40" path="/var/lib/kubelet/pods/0d28859e-464e-4266-89aa-2318bc051b40/volumes" Feb 27 09:00:18 crc kubenswrapper[4906]: I0227 09:00:18.554698 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:00:18 crc kubenswrapper[4906]: E0227 09:00:18.555823 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:00:33 crc kubenswrapper[4906]: I0227 09:00:33.552678 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:00:33 crc kubenswrapper[4906]: E0227 09:00:33.554062 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:00:44 crc kubenswrapper[4906]: I0227 09:00:44.552836 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:00:44 crc kubenswrapper[4906]: E0227 09:00:44.553704 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:00:55 crc kubenswrapper[4906]: I0227 09:00:55.552663 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:00:55 crc kubenswrapper[4906]: E0227 09:00:55.553301 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:00:57 crc kubenswrapper[4906]: I0227 09:00:57.047444 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-q4qfq"] Feb 27 09:00:57 crc 
kubenswrapper[4906]: I0227 09:00:57.056598 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-q4qfq"] Feb 27 09:00:58 crc kubenswrapper[4906]: I0227 09:00:58.566421 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aaaf3558-92de-434c-825f-cb5a828a19fe" path="/var/lib/kubelet/pods/aaaf3558-92de-434c-825f-cb5a828a19fe/volumes" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.039528 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-67z26"] Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.048207 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-67z26"] Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.160183 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29536381-qq67w"] Feb 27 09:01:00 crc kubenswrapper[4906]: E0227 09:01:00.160813 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad15d159-a96a-4408-98e7-bc7b673ae905" containerName="collect-profiles" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.160837 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad15d159-a96a-4408-98e7-bc7b673ae905" containerName="collect-profiles" Feb 27 09:01:00 crc kubenswrapper[4906]: E0227 09:01:00.160875 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="234a230d-4b25-4208-814b-9c883c68f128" containerName="oc" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.160907 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="234a230d-4b25-4208-814b-9c883c68f128" containerName="oc" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.161127 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="234a230d-4b25-4208-814b-9c883c68f128" containerName="oc" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.161155 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad15d159-a96a-4408-98e7-bc7b673ae905" containerName="collect-profiles" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.161987 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.173595 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29536381-qq67w"] Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.237293 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-combined-ca-bundle\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.237376 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x46nq\" (UniqueName: \"kubernetes.io/projected/358c1490-223c-4b95-87fb-279305744869-kube-api-access-x46nq\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.237424 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-config-data\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.237449 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-fernet-keys\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.340161 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-combined-ca-bundle\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.340250 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x46nq\" (UniqueName: \"kubernetes.io/projected/358c1490-223c-4b95-87fb-279305744869-kube-api-access-x46nq\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.340308 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-config-data\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.340333 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-fernet-keys\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.348685 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-fernet-keys\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.350896 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-combined-ca-bundle\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.351297 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-config-data\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.369215 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x46nq\" (UniqueName: \"kubernetes.io/projected/358c1490-223c-4b95-87fb-279305744869-kube-api-access-x46nq\") pod \"keystone-cron-29536381-qq67w\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.481275 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.569072 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="128b6d95-fa07-4ab9-a927-47882c406fa3" path="/var/lib/kubelet/pods/128b6d95-fa07-4ab9-a927-47882c406fa3/volumes" Feb 27 09:01:00 crc kubenswrapper[4906]: I0227 09:01:00.978198 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29536381-qq67w"] Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.418921 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29536381-qq67w" event={"ID":"358c1490-223c-4b95-87fb-279305744869","Type":"ContainerStarted","Data":"9b3335bafbd85a9710066975517e3d057df5bcc146dfc5f4c2ba3685e6187012"} Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.418980 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29536381-qq67w" event={"ID":"358c1490-223c-4b95-87fb-279305744869","Type":"ContainerStarted","Data":"dc0dd3ba0b189e75ebe7ec515abcb2bccfa846c63694a7a853e1a91c0e31d6e9"} Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.446502 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29536381-qq67w" podStartSLOduration=1.4464720469999999 podStartE2EDuration="1.446472047s" podCreationTimestamp="2026-02-27 09:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-27 09:01:01.443311784 +0000 UTC m=+1959.837713414" watchObservedRunningTime="2026-02-27 09:01:01.446472047 +0000 UTC m=+1959.840873657" Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.673094 4906 scope.go:117] "RemoveContainer" containerID="ff74f8c1c239517610ae82fe9c54e8f2cdc13655273903ddb470f0835bec89f0" Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.735598 4906 scope.go:117] "RemoveContainer" 
containerID="8ca46ee208ce75fb2d5814027ef4cef40242d734440d427136e09997625c9234" Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.785366 4906 scope.go:117] "RemoveContainer" containerID="603bfcd590be81ba2ee40841d59a8b26694906f7e62cd5d651bf832bf01557ac" Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.855774 4906 scope.go:117] "RemoveContainer" containerID="e7ef7b403767c0dd4412bfaa469ab7da53a63e63af0a30adebf41d367d3e3822" Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.895603 4906 scope.go:117] "RemoveContainer" containerID="ec1256f38f1b6bf03ad42d99f256f3bc755667f67925ff9b325a4878eba95298" Feb 27 09:01:01 crc kubenswrapper[4906]: I0227 09:01:01.961100 4906 scope.go:117] "RemoveContainer" containerID="cfc93b65268e8c5f4dc0495ac26f3b5aac10ecf8e070fdee100aeb5197266e83" Feb 27 09:01:04 crc kubenswrapper[4906]: I0227 09:01:04.450850 4906 generic.go:334] "Generic (PLEG): container finished" podID="358c1490-223c-4b95-87fb-279305744869" containerID="9b3335bafbd85a9710066975517e3d057df5bcc146dfc5f4c2ba3685e6187012" exitCode=0 Feb 27 09:01:04 crc kubenswrapper[4906]: I0227 09:01:04.450951 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29536381-qq67w" event={"ID":"358c1490-223c-4b95-87fb-279305744869","Type":"ContainerDied","Data":"9b3335bafbd85a9710066975517e3d057df5bcc146dfc5f4c2ba3685e6187012"} Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.066224 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-jjnnc"] Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.077298 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-jjnnc"] Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.464403 4906 generic.go:334] "Generic (PLEG): container finished" podID="e632f9dc-ad86-45fb-8fcb-a35d8a92b07a" containerID="ae3de81c4b5ad512c580a52621492a7c4c1be47a26ea668a06e289d545278f74" exitCode=0 Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.464500 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" event={"ID":"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a","Type":"ContainerDied","Data":"ae3de81c4b5ad512c580a52621492a7c4c1be47a26ea668a06e289d545278f74"} Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.820064 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.897080 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-combined-ca-bundle\") pod \"358c1490-223c-4b95-87fb-279305744869\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.897259 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-fernet-keys\") pod \"358c1490-223c-4b95-87fb-279305744869\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.897308 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x46nq\" (UniqueName: \"kubernetes.io/projected/358c1490-223c-4b95-87fb-279305744869-kube-api-access-x46nq\") pod \"358c1490-223c-4b95-87fb-279305744869\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.897346 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-config-data\") pod \"358c1490-223c-4b95-87fb-279305744869\" (UID: \"358c1490-223c-4b95-87fb-279305744869\") " Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.904099 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "358c1490-223c-4b95-87fb-279305744869" (UID: "358c1490-223c-4b95-87fb-279305744869"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.915545 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/358c1490-223c-4b95-87fb-279305744869-kube-api-access-x46nq" (OuterVolumeSpecName: "kube-api-access-x46nq") pod "358c1490-223c-4b95-87fb-279305744869" (UID: "358c1490-223c-4b95-87fb-279305744869"). InnerVolumeSpecName "kube-api-access-x46nq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.931777 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "358c1490-223c-4b95-87fb-279305744869" (UID: "358c1490-223c-4b95-87fb-279305744869"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:01:05 crc kubenswrapper[4906]: I0227 09:01:05.954991 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-config-data" (OuterVolumeSpecName: "config-data") pod "358c1490-223c-4b95-87fb-279305744869" (UID: "358c1490-223c-4b95-87fb-279305744869"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.000309 4906 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.000347 4906 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.000363 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x46nq\" (UniqueName: \"kubernetes.io/projected/358c1490-223c-4b95-87fb-279305744869-kube-api-access-x46nq\") on node \"crc\" DevicePath \"\"" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.000378 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/358c1490-223c-4b95-87fb-279305744869-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.481193 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29536381-qq67w" event={"ID":"358c1490-223c-4b95-87fb-279305744869","Type":"ContainerDied","Data":"dc0dd3ba0b189e75ebe7ec515abcb2bccfa846c63694a7a853e1a91c0e31d6e9"} Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.481266 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc0dd3ba0b189e75ebe7ec515abcb2bccfa846c63694a7a853e1a91c0e31d6e9" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.482342 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29536381-qq67w" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.553525 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:01:06 crc kubenswrapper[4906]: E0227 09:01:06.554461 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:01:06 crc kubenswrapper[4906]: I0227 09:01:06.567776 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7" path="/var/lib/kubelet/pods/3ebf0ece-7396-4a55-b1ad-9eaf3fece3f7/volumes" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.024580 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.123185 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbpv4\" (UniqueName: \"kubernetes.io/projected/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-kube-api-access-kbpv4\") pod \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.123320 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-ssh-key-openstack-edpm-ipam\") pod \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.123525 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-inventory\") pod \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\" (UID: \"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a\") " Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.128685 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-kube-api-access-kbpv4" (OuterVolumeSpecName: "kube-api-access-kbpv4") pod "e632f9dc-ad86-45fb-8fcb-a35d8a92b07a" (UID: "e632f9dc-ad86-45fb-8fcb-a35d8a92b07a"). InnerVolumeSpecName "kube-api-access-kbpv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.151241 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e632f9dc-ad86-45fb-8fcb-a35d8a92b07a" (UID: "e632f9dc-ad86-45fb-8fcb-a35d8a92b07a"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.156743 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-inventory" (OuterVolumeSpecName: "inventory") pod "e632f9dc-ad86-45fb-8fcb-a35d8a92b07a" (UID: "e632f9dc-ad86-45fb-8fcb-a35d8a92b07a"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.228715 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.228753 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.228763 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbpv4\" (UniqueName: \"kubernetes.io/projected/e632f9dc-ad86-45fb-8fcb-a35d8a92b07a-kube-api-access-kbpv4\") on node \"crc\" DevicePath \"\"" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.491798 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" event={"ID":"e632f9dc-ad86-45fb-8fcb-a35d8a92b07a","Type":"ContainerDied","Data":"cc04f7dde728a1504b0e87f30e903401646923c48107473cb42b7833c0da1cc2"} Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.491843 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cc04f7dde728a1504b0e87f30e903401646923c48107473cb42b7833c0da1cc2" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.492027 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.566389 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n"] Feb 27 09:01:07 crc kubenswrapper[4906]: E0227 09:01:07.566985 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e632f9dc-ad86-45fb-8fcb-a35d8a92b07a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.567016 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e632f9dc-ad86-45fb-8fcb-a35d8a92b07a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 27 09:01:07 crc kubenswrapper[4906]: E0227 09:01:07.567094 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="358c1490-223c-4b95-87fb-279305744869" containerName="keystone-cron" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.567105 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="358c1490-223c-4b95-87fb-279305744869" containerName="keystone-cron" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.574405 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="358c1490-223c-4b95-87fb-279305744869" containerName="keystone-cron" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.574468 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e632f9dc-ad86-45fb-8fcb-a35d8a92b07a" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.575627 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.580214 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.580824 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.580241 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.582734 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n"] Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.583656 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.646871 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzfrn\" (UniqueName: \"kubernetes.io/projected/e77dee0c-6c62-4257-b0cc-7c4befd35e69-kube-api-access-qzfrn\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.646972 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.647079 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.749834 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzfrn\" (UniqueName: \"kubernetes.io/projected/e77dee0c-6c62-4257-b0cc-7c4befd35e69-kube-api-access-qzfrn\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.749930 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.749994 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.754928 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.755402 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-ssh-key-openstack-edpm-ipam\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.769618 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzfrn\" (UniqueName: \"kubernetes.io/projected/e77dee0c-6c62-4257-b0cc-7c4befd35e69-kube-api-access-qzfrn\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:07 crc kubenswrapper[4906]: I0227 09:01:07.919335 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:01:08 crc kubenswrapper[4906]: I0227 09:01:08.461946 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n"] Feb 27 09:01:08 crc kubenswrapper[4906]: I0227 09:01:08.507344 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" event={"ID":"e77dee0c-6c62-4257-b0cc-7c4befd35e69","Type":"ContainerStarted","Data":"42c7937c2b38c4eb4760d7c58862720d6085972c6f3e338e43b0ce2c942d1004"} Feb 27 09:01:10 crc kubenswrapper[4906]: I0227 09:01:10.530909 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" event={"ID":"e77dee0c-6c62-4257-b0cc-7c4befd35e69","Type":"ContainerStarted","Data":"4709ebd590e4d02ca60ceb3a6c573a22674343f61d614589bf809b550856aba6"} Feb 27 09:01:10 crc kubenswrapper[4906]: I0227 09:01:10.555638 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" podStartSLOduration=2.480068823 podStartE2EDuration="3.555616677s" podCreationTimestamp="2026-02-27 09:01:07 +0000 UTC" firstStartedPulling="2026-02-27 09:01:08.469749864 +0000 UTC m=+1966.864151474" lastFinishedPulling="2026-02-27 09:01:09.545297718 +0000 UTC m=+1967.939699328" observedRunningTime="2026-02-27 09:01:10.546536338 +0000 UTC m=+1968.940937948" watchObservedRunningTime="2026-02-27 09:01:10.555616677 +0000 UTC m=+1968.950018277" Feb 27 09:01:15 crc kubenswrapper[4906]: I0227 09:01:15.052116 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-sgch9"] 
Feb 27 09:01:15 crc kubenswrapper[4906]: I0227 09:01:15.064670 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-sgch9"] Feb 27 09:01:16 crc kubenswrapper[4906]: I0227 09:01:16.575279 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2e1d22f-d9ac-467f-81f9-e4a4282f46aa" path="/var/lib/kubelet/pods/e2e1d22f-d9ac-467f-81f9-e4a4282f46aa/volumes" Feb 27 09:01:18 crc kubenswrapper[4906]: I0227 09:01:18.552204 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:01:18 crc kubenswrapper[4906]: E0227 09:01:18.552994 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:01:26 crc kubenswrapper[4906]: I0227 09:01:26.053148 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-d4fhc"] Feb 27 09:01:26 crc kubenswrapper[4906]: I0227 09:01:26.067382 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-d4fhc"] Feb 27 09:01:26 crc kubenswrapper[4906]: I0227 09:01:26.571150 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b36228db-b66d-4815-ac1c-e58b85ee3bbf" path="/var/lib/kubelet/pods/b36228db-b66d-4815-ac1c-e58b85ee3bbf/volumes" Feb 27 09:01:30 crc kubenswrapper[4906]: I0227 09:01:30.553603 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:01:30 crc kubenswrapper[4906]: E0227 09:01:30.554577 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:01:33 crc kubenswrapper[4906]: I0227 09:01:33.045701 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-pg9nd"] Feb 27 09:01:33 crc kubenswrapper[4906]: I0227 09:01:33.058483 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-pg9nd"] Feb 27 09:01:34 crc kubenswrapper[4906]: I0227 09:01:34.562436 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76d04662-7576-4f57-aca2-e118e5efd771" path="/var/lib/kubelet/pods/76d04662-7576-4f57-aca2-e118e5efd771/volumes" Feb 27 09:01:45 crc kubenswrapper[4906]: I0227 09:01:45.553251 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:01:45 crc kubenswrapper[4906]: E0227 09:01:45.554528 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 
27 09:01:57 crc kubenswrapper[4906]: I0227 09:01:57.552447 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:01:57 crc kubenswrapper[4906]: E0227 09:01:57.553543 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.147683 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536382-m9ncn"] Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.149642 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536382-m9ncn" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.153693 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.156915 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.160251 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.179004 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536382-m9ncn"] Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.274515 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgst7\" (UniqueName: \"kubernetes.io/projected/107ddf8b-5296-4d9c-92b7-c8ed0287c50c-kube-api-access-mgst7\") pod \"auto-csr-approver-29536382-m9ncn\" (UID: \"107ddf8b-5296-4d9c-92b7-c8ed0287c50c\") " pod="openshift-infra/auto-csr-approver-29536382-m9ncn" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.377834 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgst7\" (UniqueName: \"kubernetes.io/projected/107ddf8b-5296-4d9c-92b7-c8ed0287c50c-kube-api-access-mgst7\") pod \"auto-csr-approver-29536382-m9ncn\" (UID: \"107ddf8b-5296-4d9c-92b7-c8ed0287c50c\") " pod="openshift-infra/auto-csr-approver-29536382-m9ncn" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.400793 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgst7\" (UniqueName: \"kubernetes.io/projected/107ddf8b-5296-4d9c-92b7-c8ed0287c50c-kube-api-access-mgst7\") pod \"auto-csr-approver-29536382-m9ncn\" (UID: \"107ddf8b-5296-4d9c-92b7-c8ed0287c50c\") " pod="openshift-infra/auto-csr-approver-29536382-m9ncn" Feb 27 09:02:00 crc kubenswrapper[4906]: I0227 09:02:00.471928 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536382-m9ncn" Feb 27 09:02:01 crc kubenswrapper[4906]: I0227 09:02:01.120144 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536382-m9ncn"] Feb 27 09:02:02 crc kubenswrapper[4906]: I0227 09:02:02.054715 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536382-m9ncn" event={"ID":"107ddf8b-5296-4d9c-92b7-c8ed0287c50c","Type":"ContainerStarted","Data":"83cdb1141f31cbbb20c87bf7a2fcab8f9630165d38433d07f36f8cdf8ff2c555"} Feb 27 09:02:02 crc kubenswrapper[4906]: I0227 09:02:02.160966 4906 scope.go:117] "RemoveContainer" containerID="074aab155c138dea42273142d3dd99e2b677ea08a3edf628dad71e9e6b3a0b15" Feb 27 09:02:02 crc kubenswrapper[4906]: I0227 09:02:02.214182 4906 scope.go:117] "RemoveContainer" containerID="7afff095932d9429eea67d34bf6f9ac2503e2c1396391f4b0d1fa256b46e910f" Feb 27 09:02:02 crc kubenswrapper[4906]: I0227 09:02:02.288813 4906 scope.go:117] "RemoveContainer" containerID="60cfeacf822988c4a0a71e9f4902d3c855faa08dabb8cce270594b0a54784f19" Feb 27 09:02:02 crc kubenswrapper[4906]: I0227 09:02:02.353179 4906 scope.go:117] "RemoveContainer" containerID="72df2c35f2b9841c7cebfdf6f3a99c7d0b8409815b6af3a1c36acb589987223f" Feb 27 09:02:03 crc kubenswrapper[4906]: I0227 09:02:03.067485 4906 generic.go:334] "Generic (PLEG): container finished" podID="107ddf8b-5296-4d9c-92b7-c8ed0287c50c" containerID="09caffd51bd53bec733f2c1ef08958a7cc4aefe1cc006598dee6cae5157c0aae" exitCode=0 Feb 27 09:02:03 crc kubenswrapper[4906]: I0227 09:02:03.067682 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536382-m9ncn" event={"ID":"107ddf8b-5296-4d9c-92b7-c8ed0287c50c","Type":"ContainerDied","Data":"09caffd51bd53bec733f2c1ef08958a7cc4aefe1cc006598dee6cae5157c0aae"} Feb 27 09:02:04 crc kubenswrapper[4906]: I0227 09:02:04.054401 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-75hmz"] Feb 27 09:02:04 crc kubenswrapper[4906]: I0227 09:02:04.062590 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-75hmz"] Feb 27 09:02:04 crc kubenswrapper[4906]: I0227 09:02:04.440452 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536382-m9ncn" Feb 27 09:02:04 crc kubenswrapper[4906]: I0227 09:02:04.562926 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c831978a-e34a-46bf-99c4-bfdffd022f43" path="/var/lib/kubelet/pods/c831978a-e34a-46bf-99c4-bfdffd022f43/volumes" Feb 27 09:02:04 crc kubenswrapper[4906]: I0227 09:02:04.616789 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgst7\" (UniqueName: \"kubernetes.io/projected/107ddf8b-5296-4d9c-92b7-c8ed0287c50c-kube-api-access-mgst7\") pod \"107ddf8b-5296-4d9c-92b7-c8ed0287c50c\" (UID: \"107ddf8b-5296-4d9c-92b7-c8ed0287c50c\") " Feb 27 09:02:04 crc kubenswrapper[4906]: I0227 09:02:04.625355 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/107ddf8b-5296-4d9c-92b7-c8ed0287c50c-kube-api-access-mgst7" (OuterVolumeSpecName: "kube-api-access-mgst7") pod "107ddf8b-5296-4d9c-92b7-c8ed0287c50c" (UID: "107ddf8b-5296-4d9c-92b7-c8ed0287c50c"). InnerVolumeSpecName "kube-api-access-mgst7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:02:04 crc kubenswrapper[4906]: I0227 09:02:04.720247 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgst7\" (UniqueName: \"kubernetes.io/projected/107ddf8b-5296-4d9c-92b7-c8ed0287c50c-kube-api-access-mgst7\") on node \"crc\" DevicePath \"\"" Feb 27 09:02:05 crc kubenswrapper[4906]: I0227 09:02:05.091757 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536382-m9ncn" event={"ID":"107ddf8b-5296-4d9c-92b7-c8ed0287c50c","Type":"ContainerDied","Data":"83cdb1141f31cbbb20c87bf7a2fcab8f9630165d38433d07f36f8cdf8ff2c555"} Feb 27 09:02:05 crc kubenswrapper[4906]: I0227 09:02:05.091837 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="83cdb1141f31cbbb20c87bf7a2fcab8f9630165d38433d07f36f8cdf8ff2c555" Feb 27 09:02:05 crc kubenswrapper[4906]: I0227 09:02:05.091970 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536382-m9ncn" Feb 27 09:02:05 crc kubenswrapper[4906]: I0227 09:02:05.533580 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536376-dbcs9"] Feb 27 09:02:05 crc kubenswrapper[4906]: I0227 09:02:05.546099 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536376-dbcs9"] Feb 27 09:02:06 crc kubenswrapper[4906]: I0227 09:02:06.034113 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-5hsm6"] Feb 27 09:02:06 crc kubenswrapper[4906]: I0227 09:02:06.045422 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-5hsm6"] Feb 27 09:02:06 crc kubenswrapper[4906]: I0227 09:02:06.565227 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b86e3b5-b8e0-48ce-8903-4dc93860723f" path="/var/lib/kubelet/pods/9b86e3b5-b8e0-48ce-8903-4dc93860723f/volumes" Feb 27 09:02:06 crc kubenswrapper[4906]: I0227 09:02:06.565792 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5a10e44-232f-494b-b06e-01dbcc0f8493" path="/var/lib/kubelet/pods/a5a10e44-232f-494b-b06e-01dbcc0f8493/volumes" Feb 27 09:02:08 crc kubenswrapper[4906]: I0227 09:02:08.552749 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:02:08 crc kubenswrapper[4906]: E0227 09:02:08.553870 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:02:09 crc kubenswrapper[4906]: I0227 09:02:09.032024 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-cjk9b"] Feb 27 09:02:09 crc kubenswrapper[4906]: I0227 09:02:09.042854 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-9692-account-create-update-c6k95"] Feb 27 09:02:09 crc kubenswrapper[4906]: I0227 09:02:09.052551 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-cjk9b"] Feb 27 09:02:09 crc kubenswrapper[4906]: I0227 09:02:09.062823 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-cell1-9692-account-create-update-c6k95"] Feb 27 09:02:10 crc kubenswrapper[4906]: I0227 09:02:10.564502 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b" path="/var/lib/kubelet/pods/6bc52da4-fcb4-4a72-ad6e-b4ca52eca30b/volumes" Feb 27 09:02:10 crc kubenswrapper[4906]: I0227 09:02:10.565652 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82" path="/var/lib/kubelet/pods/d7cdbcd3-cfe0-4932-9bd5-2b0ec3553c82/volumes" Feb 27 09:02:11 crc kubenswrapper[4906]: I0227 09:02:11.040361 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0b66-account-create-update-t85cr"] Feb 27 09:02:11 crc kubenswrapper[4906]: I0227 09:02:11.049752 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0b66-account-create-update-t85cr"] Feb 27 09:02:11 crc kubenswrapper[4906]: I0227 09:02:11.060436 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-5654-account-create-update-ljxcj"] Feb 27 09:02:11 crc kubenswrapper[4906]: I0227 09:02:11.069656 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-5654-account-create-update-ljxcj"] Feb 27 09:02:12 crc kubenswrapper[4906]: I0227 09:02:12.566172 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d2a80d9-824f-416e-a035-de905d169e5d" path="/var/lib/kubelet/pods/1d2a80d9-824f-416e-a035-de905d169e5d/volumes" Feb 27 09:02:12 crc kubenswrapper[4906]: I0227 09:02:12.567308 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f0033485-16b3-4a98-a7c2-588f320dddee" path="/var/lib/kubelet/pods/f0033485-16b3-4a98-a7c2-588f320dddee/volumes" Feb 27 09:02:21 crc kubenswrapper[4906]: I0227 09:02:21.552867 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:02:21 crc kubenswrapper[4906]: E0227 09:02:21.554204 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:02:27 crc kubenswrapper[4906]: I0227 09:02:27.320235 4906 generic.go:334] "Generic (PLEG): container finished" podID="e77dee0c-6c62-4257-b0cc-7c4befd35e69" containerID="4709ebd590e4d02ca60ceb3a6c573a22674343f61d614589bf809b550856aba6" exitCode=0 Feb 27 09:02:27 crc kubenswrapper[4906]: I0227 09:02:27.320323 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" event={"ID":"e77dee0c-6c62-4257-b0cc-7c4befd35e69","Type":"ContainerDied","Data":"4709ebd590e4d02ca60ceb3a6c573a22674343f61d614589bf809b550856aba6"} Feb 27 09:02:28 crc kubenswrapper[4906]: I0227 09:02:28.778169 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:02:28 crc kubenswrapper[4906]: I0227 09:02:28.962735 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzfrn\" (UniqueName: \"kubernetes.io/projected/e77dee0c-6c62-4257-b0cc-7c4befd35e69-kube-api-access-qzfrn\") pod \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " Feb 27 09:02:28 crc kubenswrapper[4906]: I0227 09:02:28.962963 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-inventory\") pod \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " Feb 27 09:02:28 crc kubenswrapper[4906]: I0227 09:02:28.963109 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-ssh-key-openstack-edpm-ipam\") pod \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\" (UID: \"e77dee0c-6c62-4257-b0cc-7c4befd35e69\") " Feb 27 09:02:28 crc kubenswrapper[4906]: I0227 09:02:28.970495 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e77dee0c-6c62-4257-b0cc-7c4befd35e69-kube-api-access-qzfrn" (OuterVolumeSpecName: "kube-api-access-qzfrn") pod "e77dee0c-6c62-4257-b0cc-7c4befd35e69" (UID: "e77dee0c-6c62-4257-b0cc-7c4befd35e69"). InnerVolumeSpecName "kube-api-access-qzfrn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:02:28 crc kubenswrapper[4906]: I0227 09:02:28.994973 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e77dee0c-6c62-4257-b0cc-7c4befd35e69" (UID: "e77dee0c-6c62-4257-b0cc-7c4befd35e69"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:02:28 crc kubenswrapper[4906]: I0227 09:02:28.999219 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-inventory" (OuterVolumeSpecName: "inventory") pod "e77dee0c-6c62-4257-b0cc-7c4befd35e69" (UID: "e77dee0c-6c62-4257-b0cc-7c4befd35e69"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.068672 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.068743 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e77dee0c-6c62-4257-b0cc-7c4befd35e69-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.068766 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzfrn\" (UniqueName: \"kubernetes.io/projected/e77dee0c-6c62-4257-b0cc-7c4befd35e69-kube-api-access-qzfrn\") on node \"crc\" DevicePath \"\"" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.346761 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.346960 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n" event={"ID":"e77dee0c-6c62-4257-b0cc-7c4befd35e69","Type":"ContainerDied","Data":"42c7937c2b38c4eb4760d7c58862720d6085972c6f3e338e43b0ce2c942d1004"} Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.347032 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42c7937c2b38c4eb4760d7c58862720d6085972c6f3e338e43b0ce2c942d1004" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.444117 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7"] Feb 27 09:02:29 crc kubenswrapper[4906]: E0227 09:02:29.444674 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e77dee0c-6c62-4257-b0cc-7c4befd35e69" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.444699 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e77dee0c-6c62-4257-b0cc-7c4befd35e69" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 27 09:02:29 crc kubenswrapper[4906]: E0227 09:02:29.444716 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="107ddf8b-5296-4d9c-92b7-c8ed0287c50c" containerName="oc" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.444724 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="107ddf8b-5296-4d9c-92b7-c8ed0287c50c" containerName="oc" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.444945 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="107ddf8b-5296-4d9c-92b7-c8ed0287c50c" containerName="oc" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.444978 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e77dee0c-6c62-4257-b0cc-7c4befd35e69" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.445647 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.447956 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.448252 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.449823 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.450251 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.457634 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7"] Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.579304 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j25z\" (UniqueName: \"kubernetes.io/projected/953a02f5-56dc-4fe0-b20f-158522e6d7d9-kube-api-access-8j25z\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.579400 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.580288 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.682793 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j25z\" (UniqueName: \"kubernetes.io/projected/953a02f5-56dc-4fe0-b20f-158522e6d7d9-kube-api-access-8j25z\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.682941 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.683018 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.688174 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.688493 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-ssh-key-openstack-edpm-ipam\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.705797 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j25z\" (UniqueName: \"kubernetes.io/projected/953a02f5-56dc-4fe0-b20f-158522e6d7d9-kube-api-access-8j25z\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:29 crc kubenswrapper[4906]: I0227 09:02:29.766395 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:30 crc kubenswrapper[4906]: I0227 09:02:30.355589 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7"] Feb 27 09:02:31 crc kubenswrapper[4906]: I0227 09:02:31.369685 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" event={"ID":"953a02f5-56dc-4fe0-b20f-158522e6d7d9","Type":"ContainerStarted","Data":"2bf8e437c94c1fb82b88cd8554611460bc7e3c34eaedfef9d8b595b3b27915f4"} Feb 27 09:02:31 crc kubenswrapper[4906]: I0227 09:02:31.370078 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" event={"ID":"953a02f5-56dc-4fe0-b20f-158522e6d7d9","Type":"ContainerStarted","Data":"c99c4d09460c1dcc35e29e81f22ef61b77fd0a9ad6128b9c39aa16471a39fbe7"} Feb 27 09:02:31 crc kubenswrapper[4906]: I0227 09:02:31.402152 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" podStartSLOduration=1.706009603 podStartE2EDuration="2.402122076s" podCreationTimestamp="2026-02-27 09:02:29 +0000 UTC" firstStartedPulling="2026-02-27 09:02:30.360572036 +0000 UTC m=+2048.754973646" lastFinishedPulling="2026-02-27 09:02:31.056684499 +0000 UTC m=+2049.451086119" observedRunningTime="2026-02-27 09:02:31.391365053 +0000 UTC m=+2049.785766663" watchObservedRunningTime="2026-02-27 09:02:31.402122076 +0000 UTC m=+2049.796523686" Feb 27 09:02:35 crc kubenswrapper[4906]: I0227 09:02:35.553349 4906 scope.go:117] "RemoveContainer" 
containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:02:35 crc kubenswrapper[4906]: E0227 09:02:35.554215 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:02:36 crc kubenswrapper[4906]: I0227 09:02:36.422731 4906 generic.go:334] "Generic (PLEG): container finished" podID="953a02f5-56dc-4fe0-b20f-158522e6d7d9" containerID="2bf8e437c94c1fb82b88cd8554611460bc7e3c34eaedfef9d8b595b3b27915f4" exitCode=0 Feb 27 09:02:36 crc kubenswrapper[4906]: I0227 09:02:36.422829 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" event={"ID":"953a02f5-56dc-4fe0-b20f-158522e6d7d9","Type":"ContainerDied","Data":"2bf8e437c94c1fb82b88cd8554611460bc7e3c34eaedfef9d8b595b3b27915f4"} Feb 27 09:02:37 crc kubenswrapper[4906]: I0227 09:02:37.922778 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.105298 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8j25z\" (UniqueName: \"kubernetes.io/projected/953a02f5-56dc-4fe0-b20f-158522e6d7d9-kube-api-access-8j25z\") pod \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.105632 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-inventory\") pod \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.105733 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-ssh-key-openstack-edpm-ipam\") pod \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\" (UID: \"953a02f5-56dc-4fe0-b20f-158522e6d7d9\") " Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.113184 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/953a02f5-56dc-4fe0-b20f-158522e6d7d9-kube-api-access-8j25z" (OuterVolumeSpecName: "kube-api-access-8j25z") pod "953a02f5-56dc-4fe0-b20f-158522e6d7d9" (UID: "953a02f5-56dc-4fe0-b20f-158522e6d7d9"). InnerVolumeSpecName "kube-api-access-8j25z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.137648 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-inventory" (OuterVolumeSpecName: "inventory") pod "953a02f5-56dc-4fe0-b20f-158522e6d7d9" (UID: "953a02f5-56dc-4fe0-b20f-158522e6d7d9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.154135 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "953a02f5-56dc-4fe0-b20f-158522e6d7d9" (UID: "953a02f5-56dc-4fe0-b20f-158522e6d7d9"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.208709 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.208764 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/953a02f5-56dc-4fe0-b20f-158522e6d7d9-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.208778 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8j25z\" (UniqueName: \"kubernetes.io/projected/953a02f5-56dc-4fe0-b20f-158522e6d7d9-kube-api-access-8j25z\") on node \"crc\" DevicePath \"\"" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.451600 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" event={"ID":"953a02f5-56dc-4fe0-b20f-158522e6d7d9","Type":"ContainerDied","Data":"c99c4d09460c1dcc35e29e81f22ef61b77fd0a9ad6128b9c39aa16471a39fbe7"} Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.451664 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c99c4d09460c1dcc35e29e81f22ef61b77fd0a9ad6128b9c39aa16471a39fbe7" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.451731 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.547621 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8"] Feb 27 09:02:38 crc kubenswrapper[4906]: E0227 09:02:38.548175 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="953a02f5-56dc-4fe0-b20f-158522e6d7d9" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.548197 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="953a02f5-56dc-4fe0-b20f-158522e6d7d9" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.548441 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="953a02f5-56dc-4fe0-b20f-158522e6d7d9" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.549354 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.552059 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.552636 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.552843 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.554804 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.567754 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8"] Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.721159 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgsr6\" (UniqueName: \"kubernetes.io/projected/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-kube-api-access-zgsr6\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.721260 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.721769 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.824304 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.824402 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgsr6\" (UniqueName: \"kubernetes.io/projected/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-kube-api-access-zgsr6\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.824441 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-ssh-key-openstack-edpm-ipam\") pod 
\"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.833694 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-ssh-key-openstack-edpm-ipam\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.835697 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.844663 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgsr6\" (UniqueName: \"kubernetes.io/projected/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-kube-api-access-zgsr6\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-ff8w8\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:38 crc kubenswrapper[4906]: I0227 09:02:38.889344 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:02:39 crc kubenswrapper[4906]: I0227 09:02:39.450762 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8"] Feb 27 09:02:40 crc kubenswrapper[4906]: I0227 09:02:40.481930 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" event={"ID":"0d21edbc-e0a7-453e-81c7-ebb897fa20fb","Type":"ContainerStarted","Data":"aa9ca9e90fa9d7ffd8ba4c251ee29fc5ba3a8381dbce7d2fad4a6974f143a1c5"} Feb 27 09:02:41 crc kubenswrapper[4906]: I0227 09:02:41.495558 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" event={"ID":"0d21edbc-e0a7-453e-81c7-ebb897fa20fb","Type":"ContainerStarted","Data":"5498451a8bbc35cd807b989498a528598029d82edfc82e92a59117fba3dd9745"} Feb 27 09:02:41 crc kubenswrapper[4906]: I0227 09:02:41.527288 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" podStartSLOduration=2.689325126 podStartE2EDuration="3.527264495s" podCreationTimestamp="2026-02-27 09:02:38 +0000 UTC" firstStartedPulling="2026-02-27 09:02:39.474989085 +0000 UTC m=+2057.869390715" lastFinishedPulling="2026-02-27 09:02:40.312928474 +0000 UTC m=+2058.707330084" observedRunningTime="2026-02-27 09:02:41.517166989 +0000 UTC m=+2059.911568629" watchObservedRunningTime="2026-02-27 09:02:41.527264495 +0000 UTC m=+2059.921666105" Feb 27 09:02:46 crc kubenswrapper[4906]: I0227 09:02:46.553574 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:02:46 crc kubenswrapper[4906]: E0227 09:02:46.554455 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:02:59 crc kubenswrapper[4906]: I0227 09:02:59.050831 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-p544r"] Feb 27 09:02:59 crc kubenswrapper[4906]: I0227 09:02:59.063174 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-p544r"] Feb 27 09:03:00 crc kubenswrapper[4906]: I0227 09:03:00.553853 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:03:00 crc kubenswrapper[4906]: I0227 09:03:00.566083 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b" path="/var/lib/kubelet/pods/fb5fa7bc-377b-43b6-b68e-2710b1bbdd8b/volumes" Feb 27 09:03:01 crc kubenswrapper[4906]: I0227 09:03:01.736266 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"9e0140582f9bf5221401dd77c11465d92b2dac3c13999181c7540eb7eab49661"} Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.540801 4906 scope.go:117] "RemoveContainer" containerID="b19539382c9a1f6da8731edc6dc197a43ee30da3378623c2d2f0f72da2858b44" Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.586973 4906 scope.go:117] "RemoveContainer" containerID="9d0e8a0faa98f4bbb52fddfbdb315807fc911355169f50a0567651ff92cf2f30" Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.628549 4906 scope.go:117] "RemoveContainer" containerID="f9cbe66b3d932a3f7ec3d208f69f8d28b80515d8665e45108857c970c1886402" Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.672669 4906 scope.go:117] "RemoveContainer" containerID="76b339b33a0b28bc3f6fa912f3e45da2330cbcbbea668116e53832ff3901ea61" Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.716183 4906 scope.go:117] "RemoveContainer" containerID="ebc6421b861a9164fd06622c9f90613ca130d8585055607e360ca2425d491e72" Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.772794 4906 scope.go:117] "RemoveContainer" containerID="13547733979dada0ef59022f262dc14d07f4f903a24002918bf71775551085cb" Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.817606 4906 scope.go:117] "RemoveContainer" containerID="2b2e3e6ca6758894e548e3731051e66d503701c972c66aae0ebb4abf976b6b7a" Feb 27 09:03:02 crc kubenswrapper[4906]: I0227 09:03:02.869869 4906 scope.go:117] "RemoveContainer" containerID="b722e746a29621b0ed9453587181d6dc0090cef7cddbd963e27e0510f68a724a" Feb 27 09:03:14 crc kubenswrapper[4906]: I0227 09:03:14.881773 4906 generic.go:334] "Generic (PLEG): container finished" podID="0d21edbc-e0a7-453e-81c7-ebb897fa20fb" containerID="5498451a8bbc35cd807b989498a528598029d82edfc82e92a59117fba3dd9745" exitCode=0 Feb 27 09:03:14 crc kubenswrapper[4906]: I0227 09:03:14.881901 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" event={"ID":"0d21edbc-e0a7-453e-81c7-ebb897fa20fb","Type":"ContainerDied","Data":"5498451a8bbc35cd807b989498a528598029d82edfc82e92a59117fba3dd9745"} Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.352220 4906 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.496284 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-ssh-key-openstack-edpm-ipam\") pod \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.496469 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgsr6\" (UniqueName: \"kubernetes.io/projected/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-kube-api-access-zgsr6\") pod \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.496608 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-inventory\") pod \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\" (UID: \"0d21edbc-e0a7-453e-81c7-ebb897fa20fb\") " Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.503515 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-kube-api-access-zgsr6" (OuterVolumeSpecName: "kube-api-access-zgsr6") pod "0d21edbc-e0a7-453e-81c7-ebb897fa20fb" (UID: "0d21edbc-e0a7-453e-81c7-ebb897fa20fb"). InnerVolumeSpecName "kube-api-access-zgsr6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.523939 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0d21edbc-e0a7-453e-81c7-ebb897fa20fb" (UID: "0d21edbc-e0a7-453e-81c7-ebb897fa20fb"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.526546 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-inventory" (OuterVolumeSpecName: "inventory") pod "0d21edbc-e0a7-453e-81c7-ebb897fa20fb" (UID: "0d21edbc-e0a7-453e-81c7-ebb897fa20fb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.599304 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.599361 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.599376 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgsr6\" (UniqueName: \"kubernetes.io/projected/0d21edbc-e0a7-453e-81c7-ebb897fa20fb-kube-api-access-zgsr6\") on node \"crc\" DevicePath \"\"" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.903350 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" event={"ID":"0d21edbc-e0a7-453e-81c7-ebb897fa20fb","Type":"ContainerDied","Data":"aa9ca9e90fa9d7ffd8ba4c251ee29fc5ba3a8381dbce7d2fad4a6974f143a1c5"} Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.903406 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa9ca9e90fa9d7ffd8ba4c251ee29fc5ba3a8381dbce7d2fad4a6974f143a1c5" Feb 27 09:03:16 crc kubenswrapper[4906]: I0227 09:03:16.903424 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-ff8w8" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.006541 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj"] Feb 27 09:03:17 crc kubenswrapper[4906]: E0227 09:03:17.007282 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d21edbc-e0a7-453e-81c7-ebb897fa20fb" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.007303 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d21edbc-e0a7-453e-81c7-ebb897fa20fb" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.007535 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d21edbc-e0a7-453e-81c7-ebb897fa20fb" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.008282 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.014602 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.014673 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.015005 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.015297 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.029829 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj"] Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.110090 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.110162 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5cdm\" (UniqueName: \"kubernetes.io/projected/5988b580-0d6e-4c0f-9843-7088d1329575-kube-api-access-k5cdm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.110193 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.212402 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.212471 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5cdm\" (UniqueName: \"kubernetes.io/projected/5988b580-0d6e-4c0f-9843-7088d1329575-kube-api-access-k5cdm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.212502 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.217648 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-ssh-key-openstack-edpm-ipam\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.219348 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.231689 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5cdm\" (UniqueName: \"kubernetes.io/projected/5988b580-0d6e-4c0f-9843-7088d1329575-kube-api-access-k5cdm\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.326335 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.708050 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj"] Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.728000 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 09:03:17 crc kubenswrapper[4906]: I0227 09:03:17.916795 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" event={"ID":"5988b580-0d6e-4c0f-9843-7088d1329575","Type":"ContainerStarted","Data":"963a068bf1b5f0e99161f769f1374c956733631364cdec6a586eac4767634996"} Feb 27 09:03:18 crc kubenswrapper[4906]: I0227 09:03:18.929697 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" event={"ID":"5988b580-0d6e-4c0f-9843-7088d1329575","Type":"ContainerStarted","Data":"4fe68e5a3d52318428bbe56142ccbd92e571a30bb75373248e8537bda714e6cc"} Feb 27 09:03:18 crc kubenswrapper[4906]: I0227 09:03:18.951239 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" podStartSLOduration=2.26396425 podStartE2EDuration="2.95121337s" podCreationTimestamp="2026-02-27 09:03:16 +0000 UTC" firstStartedPulling="2026-02-27 09:03:17.727724549 +0000 UTC m=+2096.122126159" lastFinishedPulling="2026-02-27 09:03:18.414973669 +0000 UTC m=+2096.809375279" observedRunningTime="2026-02-27 09:03:18.95044114 +0000 UTC m=+2097.344842750" watchObservedRunningTime="2026-02-27 09:03:18.95121337 +0000 UTC m=+2097.345614980" Feb 27 09:03:24 crc kubenswrapper[4906]: I0227 09:03:24.046674 
4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-ld4zf"] Feb 27 09:03:24 crc kubenswrapper[4906]: I0227 09:03:24.054446 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-ld4zf"] Feb 27 09:03:24 crc kubenswrapper[4906]: I0227 09:03:24.563638 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="293f689b-eec4-4963-9036-b5fc98dcbcaa" path="/var/lib/kubelet/pods/293f689b-eec4-4963-9036-b5fc98dcbcaa/volumes" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.543000 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vbwx4"] Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.545824 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.555342 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vbwx4"] Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.603992 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-utilities\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.604057 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vj8bp\" (UniqueName: \"kubernetes.io/projected/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-kube-api-access-vj8bp\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.604318 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-catalog-content\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.707134 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-utilities\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.707208 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vj8bp\" (UniqueName: \"kubernetes.io/projected/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-kube-api-access-vj8bp\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.707297 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-catalog-content\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.707849 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-utilities\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.707962 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-catalog-content\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.735767 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vj8bp\" (UniqueName: \"kubernetes.io/projected/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-kube-api-access-vj8bp\") pod \"redhat-operators-vbwx4\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:25 crc kubenswrapper[4906]: I0227 09:03:25.866170 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:26 crc kubenswrapper[4906]: I0227 09:03:26.409395 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vbwx4"] Feb 27 09:03:27 crc kubenswrapper[4906]: I0227 09:03:27.015721 4906 generic.go:334] "Generic (PLEG): container finished" podID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerID="f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597" exitCode=0 Feb 27 09:03:27 crc kubenswrapper[4906]: I0227 09:03:27.015836 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbwx4" event={"ID":"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7","Type":"ContainerDied","Data":"f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597"} Feb 27 09:03:27 crc kubenswrapper[4906]: I0227 09:03:27.016314 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbwx4" event={"ID":"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7","Type":"ContainerStarted","Data":"a891ac2165575dd23c4b132c912cb683b6e70b8bf1dccb6833c42e95b9854c3c"} Feb 27 09:03:30 crc kubenswrapper[4906]: I0227 09:03:30.055232 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbwx4" event={"ID":"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7","Type":"ContainerStarted","Data":"245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0"} Feb 27 09:03:31 crc kubenswrapper[4906]: I0227 09:03:31.068325 4906 generic.go:334] "Generic (PLEG): container finished" podID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerID="245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0" exitCode=0 Feb 27 09:03:31 crc kubenswrapper[4906]: I0227 09:03:31.068399 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbwx4" event={"ID":"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7","Type":"ContainerDied","Data":"245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0"} Feb 27 09:03:32 crc kubenswrapper[4906]: I0227 09:03:32.082159 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbwx4" event={"ID":"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7","Type":"ContainerStarted","Data":"76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699"} Feb 27 09:03:33 crc 
kubenswrapper[4906]: I0227 09:03:33.109254 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vbwx4" podStartSLOduration=3.600823472 podStartE2EDuration="8.109227794s" podCreationTimestamp="2026-02-27 09:03:25 +0000 UTC" firstStartedPulling="2026-02-27 09:03:27.020431052 +0000 UTC m=+2105.414832662" lastFinishedPulling="2026-02-27 09:03:31.528835364 +0000 UTC m=+2109.923236984" observedRunningTime="2026-02-27 09:03:33.104988362 +0000 UTC m=+2111.499389992" watchObservedRunningTime="2026-02-27 09:03:33.109227794 +0000 UTC m=+2111.503629414" Feb 27 09:03:34 crc kubenswrapper[4906]: I0227 09:03:34.051372 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-n9r82"] Feb 27 09:03:34 crc kubenswrapper[4906]: I0227 09:03:34.060790 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-n9r82"] Feb 27 09:03:34 crc kubenswrapper[4906]: I0227 09:03:34.567098 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcd22869-73ce-4c75-8628-2bf971da33d5" path="/var/lib/kubelet/pods/bcd22869-73ce-4c75-8628-2bf971da33d5/volumes" Feb 27 09:03:35 crc kubenswrapper[4906]: I0227 09:03:35.867209 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:35 crc kubenswrapper[4906]: I0227 09:03:35.867294 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:37 crc kubenswrapper[4906]: I0227 09:03:37.755522 4906 patch_prober.go:28] interesting pod/nmstate-webhook-786f45cff4-qvmtw container/nmstate-webhook namespace/openshift-nmstate: Readiness probe status=failure output="Get \"https://10.217.0.33:9443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 27 09:03:37 crc kubenswrapper[4906]: I0227 09:03:37.755589 4906 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-nmstate/nmstate-webhook-786f45cff4-qvmtw" podUID="30e5e423-b09e-4ade-baa1-257731b5cc0b" containerName="nmstate-webhook" probeResult="failure" output="Get \"https://10.217.0.33:9443/readyz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 27 09:03:37 crc kubenswrapper[4906]: I0227 09:03:37.777429 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-vbwx4" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="registry-server" probeResult="failure" output=< Feb 27 09:03:37 crc kubenswrapper[4906]: timeout: failed to connect service ":50051" within 1s Feb 27 09:03:37 crc kubenswrapper[4906]: > Feb 27 09:03:45 crc kubenswrapper[4906]: I0227 09:03:45.974607 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:46 crc kubenswrapper[4906]: I0227 09:03:46.031528 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:46 crc kubenswrapper[4906]: I0227 09:03:46.214453 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vbwx4"] Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.240303 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vbwx4" 
podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="registry-server" containerID="cri-o://76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699" gracePeriod=2 Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.676339 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.788906 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vj8bp\" (UniqueName: \"kubernetes.io/projected/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-kube-api-access-vj8bp\") pod \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.789578 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-catalog-content\") pod \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.789679 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-utilities\") pod \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\" (UID: \"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7\") " Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.791078 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-utilities" (OuterVolumeSpecName: "utilities") pod "2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" (UID: "2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.800022 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-kube-api-access-vj8bp" (OuterVolumeSpecName: "kube-api-access-vj8bp") pod "2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" (UID: "2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7"). InnerVolumeSpecName "kube-api-access-vj8bp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.892560 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vj8bp\" (UniqueName: \"kubernetes.io/projected/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-kube-api-access-vj8bp\") on node \"crc\" DevicePath \"\"" Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.892732 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.942965 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" (UID: "2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:03:47 crc kubenswrapper[4906]: I0227 09:03:47.995820 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.255250 4906 generic.go:334] "Generic (PLEG): container finished" podID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerID="76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699" exitCode=0 Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.255313 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbwx4" event={"ID":"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7","Type":"ContainerDied","Data":"76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699"} Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.255361 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vbwx4" event={"ID":"2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7","Type":"ContainerDied","Data":"a891ac2165575dd23c4b132c912cb683b6e70b8bf1dccb6833c42e95b9854c3c"} Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.255386 4906 scope.go:117] "RemoveContainer" containerID="76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.257041 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vbwx4" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.297978 4906 scope.go:117] "RemoveContainer" containerID="245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.300438 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vbwx4"] Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.317701 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vbwx4"] Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.331487 4906 scope.go:117] "RemoveContainer" containerID="f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.377153 4906 scope.go:117] "RemoveContainer" containerID="76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699" Feb 27 09:03:48 crc kubenswrapper[4906]: E0227 09:03:48.377940 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699\": container with ID starting with 76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699 not found: ID does not exist" containerID="76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.378022 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699"} err="failed to get container status \"76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699\": rpc error: code = NotFound desc = could not find container \"76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699\": container with ID starting with 76905e1d36b381892159800a6a37871ae6ee0c58c6a37078e7cbd4d02b517699 not found: ID does not exist" Feb 27 09:03:48 crc 
kubenswrapper[4906]: I0227 09:03:48.378077 4906 scope.go:117] "RemoveContainer" containerID="245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0" Feb 27 09:03:48 crc kubenswrapper[4906]: E0227 09:03:48.378571 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0\": container with ID starting with 245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0 not found: ID does not exist" containerID="245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.378611 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0"} err="failed to get container status \"245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0\": rpc error: code = NotFound desc = could not find container \"245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0\": container with ID starting with 245f988398e8e29b13827e0525505b4b38fa6e041a21fff231e661aabedee7d0 not found: ID does not exist" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.378643 4906 scope.go:117] "RemoveContainer" containerID="f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597" Feb 27 09:03:48 crc kubenswrapper[4906]: E0227 09:03:48.379004 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597\": container with ID starting with f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597 not found: ID does not exist" containerID="f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.379029 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597"} err="failed to get container status \"f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597\": rpc error: code = NotFound desc = could not find container \"f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597\": container with ID starting with f11058bf96a01d1c68b497153be0fd1d4336e939802f340a7a294bab26044597 not found: ID does not exist" Feb 27 09:03:48 crc kubenswrapper[4906]: I0227 09:03:48.565248 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" path="/var/lib/kubelet/pods/2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7/volumes" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.151519 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536384-k5pxk"] Feb 27 09:04:00 crc kubenswrapper[4906]: E0227 09:04:00.152927 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="registry-server" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.152944 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="registry-server" Feb 27 09:04:00 crc kubenswrapper[4906]: E0227 09:04:00.152958 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="extract-content" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.152965 4906 
state_mem.go:107] "Deleted CPUSet assignment" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="extract-content" Feb 27 09:04:00 crc kubenswrapper[4906]: E0227 09:04:00.153010 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="extract-utilities" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.153016 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="extract-utilities" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.153220 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a6878a0-b8e3-4a30-9b9a-48ce5201a0f7" containerName="registry-server" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.154226 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.157313 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.159145 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.159430 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.166341 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536384-k5pxk"] Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.269099 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chqkc\" (UniqueName: \"kubernetes.io/projected/204f0880-105e-4948-a07c-fd24a322842a-kube-api-access-chqkc\") pod \"auto-csr-approver-29536384-k5pxk\" (UID: \"204f0880-105e-4948-a07c-fd24a322842a\") " pod="openshift-infra/auto-csr-approver-29536384-k5pxk" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.372829 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chqkc\" (UniqueName: \"kubernetes.io/projected/204f0880-105e-4948-a07c-fd24a322842a-kube-api-access-chqkc\") pod \"auto-csr-approver-29536384-k5pxk\" (UID: \"204f0880-105e-4948-a07c-fd24a322842a\") " pod="openshift-infra/auto-csr-approver-29536384-k5pxk" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.395927 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chqkc\" (UniqueName: \"kubernetes.io/projected/204f0880-105e-4948-a07c-fd24a322842a-kube-api-access-chqkc\") pod \"auto-csr-approver-29536384-k5pxk\" (UID: \"204f0880-105e-4948-a07c-fd24a322842a\") " pod="openshift-infra/auto-csr-approver-29536384-k5pxk" Feb 27 09:04:00 crc kubenswrapper[4906]: I0227 09:04:00.480711 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" Feb 27 09:04:01 crc kubenswrapper[4906]: I0227 09:04:01.017727 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536384-k5pxk"] Feb 27 09:04:01 crc kubenswrapper[4906]: I0227 09:04:01.404569 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" event={"ID":"204f0880-105e-4948-a07c-fd24a322842a","Type":"ContainerStarted","Data":"1ea5355895fcbb7ea3fdaa5c5a1ff8195c6f7ab3e2fbe3531dcb69811f8ef774"} Feb 27 09:04:02 crc kubenswrapper[4906]: I0227 09:04:02.422772 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" event={"ID":"204f0880-105e-4948-a07c-fd24a322842a","Type":"ContainerStarted","Data":"6f4c2d1fff4fc6e5c61386c92d056a75c750d0b032f9abe3e5faaea202f04c0b"} Feb 27 09:04:02 crc kubenswrapper[4906]: I0227 09:04:02.443112 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" podStartSLOduration=1.582095712 podStartE2EDuration="2.443087928s" podCreationTimestamp="2026-02-27 09:04:00 +0000 UTC" firstStartedPulling="2026-02-27 09:04:01.022364714 +0000 UTC m=+2139.416766324" lastFinishedPulling="2026-02-27 09:04:01.88335693 +0000 UTC m=+2140.277758540" observedRunningTime="2026-02-27 09:04:02.441348893 +0000 UTC m=+2140.835750503" watchObservedRunningTime="2026-02-27 09:04:02.443087928 +0000 UTC m=+2140.837489538" Feb 27 09:04:03 crc kubenswrapper[4906]: I0227 09:04:03.040335 4906 scope.go:117] "RemoveContainer" containerID="155a4e047dac2c401b8f299eb631cf5e030fa9aa5a6ce35a4e8ed5a7b5c28f94" Feb 27 09:04:03 crc kubenswrapper[4906]: I0227 09:04:03.099452 4906 scope.go:117] "RemoveContainer" containerID="4f161e1ad93059ba80d68c27e36baaf34205e7be85744e29f0c874ca877875a9" Feb 27 09:04:03 crc kubenswrapper[4906]: I0227 09:04:03.435005 4906 generic.go:334] "Generic (PLEG): container finished" podID="204f0880-105e-4948-a07c-fd24a322842a" containerID="6f4c2d1fff4fc6e5c61386c92d056a75c750d0b032f9abe3e5faaea202f04c0b" exitCode=0 Feb 27 09:04:03 crc kubenswrapper[4906]: I0227 09:04:03.435132 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" event={"ID":"204f0880-105e-4948-a07c-fd24a322842a","Type":"ContainerDied","Data":"6f4c2d1fff4fc6e5c61386c92d056a75c750d0b032f9abe3e5faaea202f04c0b"} Feb 27 09:04:04 crc kubenswrapper[4906]: I0227 09:04:04.851684 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" Feb 27 09:04:04 crc kubenswrapper[4906]: I0227 09:04:04.892847 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-chqkc\" (UniqueName: \"kubernetes.io/projected/204f0880-105e-4948-a07c-fd24a322842a-kube-api-access-chqkc\") pod \"204f0880-105e-4948-a07c-fd24a322842a\" (UID: \"204f0880-105e-4948-a07c-fd24a322842a\") " Feb 27 09:04:04 crc kubenswrapper[4906]: I0227 09:04:04.899758 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/204f0880-105e-4948-a07c-fd24a322842a-kube-api-access-chqkc" (OuterVolumeSpecName: "kube-api-access-chqkc") pod "204f0880-105e-4948-a07c-fd24a322842a" (UID: "204f0880-105e-4948-a07c-fd24a322842a"). InnerVolumeSpecName "kube-api-access-chqkc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:04:04 crc kubenswrapper[4906]: I0227 09:04:04.995709 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-chqkc\" (UniqueName: \"kubernetes.io/projected/204f0880-105e-4948-a07c-fd24a322842a-kube-api-access-chqkc\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:05 crc kubenswrapper[4906]: I0227 09:04:05.466784 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" event={"ID":"204f0880-105e-4948-a07c-fd24a322842a","Type":"ContainerDied","Data":"1ea5355895fcbb7ea3fdaa5c5a1ff8195c6f7ab3e2fbe3531dcb69811f8ef774"} Feb 27 09:04:05 crc kubenswrapper[4906]: I0227 09:04:05.466849 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ea5355895fcbb7ea3fdaa5c5a1ff8195c6f7ab3e2fbe3531dcb69811f8ef774" Feb 27 09:04:05 crc kubenswrapper[4906]: I0227 09:04:05.466872 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536384-k5pxk" Feb 27 09:04:05 crc kubenswrapper[4906]: I0227 09:04:05.528444 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536378-h48j4"] Feb 27 09:04:05 crc kubenswrapper[4906]: I0227 09:04:05.541328 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536378-h48j4"] Feb 27 09:04:06 crc kubenswrapper[4906]: I0227 09:04:06.481657 4906 generic.go:334] "Generic (PLEG): container finished" podID="5988b580-0d6e-4c0f-9843-7088d1329575" containerID="4fe68e5a3d52318428bbe56142ccbd92e571a30bb75373248e8537bda714e6cc" exitCode=0 Feb 27 09:04:06 crc kubenswrapper[4906]: I0227 09:04:06.481714 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" event={"ID":"5988b580-0d6e-4c0f-9843-7088d1329575","Type":"ContainerDied","Data":"4fe68e5a3d52318428bbe56142ccbd92e571a30bb75373248e8537bda714e6cc"} Feb 27 09:04:06 crc kubenswrapper[4906]: I0227 09:04:06.564725 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854" path="/var/lib/kubelet/pods/ed4c3b7b-6afc-4f40-a7cc-3d9e9624e854/volumes" Feb 27 09:04:07 crc kubenswrapper[4906]: I0227 09:04:07.936651 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:04:07 crc kubenswrapper[4906]: I0227 09:04:07.967364 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5cdm\" (UniqueName: \"kubernetes.io/projected/5988b580-0d6e-4c0f-9843-7088d1329575-kube-api-access-k5cdm\") pod \"5988b580-0d6e-4c0f-9843-7088d1329575\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " Feb 27 09:04:07 crc kubenswrapper[4906]: I0227 09:04:07.967425 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-ssh-key-openstack-edpm-ipam\") pod \"5988b580-0d6e-4c0f-9843-7088d1329575\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " Feb 27 09:04:07 crc kubenswrapper[4906]: I0227 09:04:07.967631 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-inventory\") pod \"5988b580-0d6e-4c0f-9843-7088d1329575\" (UID: \"5988b580-0d6e-4c0f-9843-7088d1329575\") " Feb 27 09:04:07 crc kubenswrapper[4906]: I0227 09:04:07.975790 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5988b580-0d6e-4c0f-9843-7088d1329575-kube-api-access-k5cdm" (OuterVolumeSpecName: "kube-api-access-k5cdm") pod "5988b580-0d6e-4c0f-9843-7088d1329575" (UID: "5988b580-0d6e-4c0f-9843-7088d1329575"). InnerVolumeSpecName "kube-api-access-k5cdm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.001324 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-inventory" (OuterVolumeSpecName: "inventory") pod "5988b580-0d6e-4c0f-9843-7088d1329575" (UID: "5988b580-0d6e-4c0f-9843-7088d1329575"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.005817 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "5988b580-0d6e-4c0f-9843-7088d1329575" (UID: "5988b580-0d6e-4c0f-9843-7088d1329575"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.078496 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.078538 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5cdm\" (UniqueName: \"kubernetes.io/projected/5988b580-0d6e-4c0f-9843-7088d1329575-kube-api-access-k5cdm\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.078551 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5988b580-0d6e-4c0f-9843-7088d1329575-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.502065 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" event={"ID":"5988b580-0d6e-4c0f-9843-7088d1329575","Type":"ContainerDied","Data":"963a068bf1b5f0e99161f769f1374c956733631364cdec6a586eac4767634996"} Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.502183 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="963a068bf1b5f0e99161f769f1374c956733631364cdec6a586eac4767634996" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.502499 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.673000 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-z2blf"] Feb 27 09:04:08 crc kubenswrapper[4906]: E0227 09:04:08.673428 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="204f0880-105e-4948-a07c-fd24a322842a" containerName="oc" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.673455 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="204f0880-105e-4948-a07c-fd24a322842a" containerName="oc" Feb 27 09:04:08 crc kubenswrapper[4906]: E0227 09:04:08.673473 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5988b580-0d6e-4c0f-9843-7088d1329575" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.673483 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="5988b580-0d6e-4c0f-9843-7088d1329575" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.673689 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="204f0880-105e-4948-a07c-fd24a322842a" containerName="oc" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.673707 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="5988b580-0d6e-4c0f-9843-7088d1329575" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.674461 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.678052 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.678425 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.678579 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.679920 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.708501 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-z2blf"] Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.795047 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.795157 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcmln\" (UniqueName: \"kubernetes.io/projected/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-kube-api-access-tcmln\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.795290 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.897947 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.898049 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcmln\" (UniqueName: \"kubernetes.io/projected/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-kube-api-access-tcmln\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.898214 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc 
kubenswrapper[4906]: I0227 09:04:08.903418 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.909328 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.919382 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcmln\" (UniqueName: \"kubernetes.io/projected/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-kube-api-access-tcmln\") pod \"ssh-known-hosts-edpm-deployment-z2blf\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:08 crc kubenswrapper[4906]: I0227 09:04:08.995205 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:09 crc kubenswrapper[4906]: I0227 09:04:09.039926 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-v4w5x"] Feb 27 09:04:09 crc kubenswrapper[4906]: I0227 09:04:09.048929 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-v4w5x"] Feb 27 09:04:09 crc kubenswrapper[4906]: I0227 09:04:09.625554 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-z2blf"] Feb 27 09:04:10 crc kubenswrapper[4906]: I0227 09:04:10.521141 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" event={"ID":"4ac686ba-92d6-4672-bd2e-936f1e9d15ba","Type":"ContainerStarted","Data":"1b7ee04515244b68494cfab117c143bba77788b6439d8a26dbfcac4964553b9d"} Feb 27 09:04:10 crc kubenswrapper[4906]: I0227 09:04:10.567090 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="14371a49-0d1f-4596-9d93-5470dfbdb6c4" path="/var/lib/kubelet/pods/14371a49-0d1f-4596-9d93-5470dfbdb6c4/volumes" Feb 27 09:04:11 crc kubenswrapper[4906]: I0227 09:04:11.534559 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" event={"ID":"4ac686ba-92d6-4672-bd2e-936f1e9d15ba","Type":"ContainerStarted","Data":"7d0f5808eb2167b3eb77a5369d376a1d3baa49883d24c8566ceae73cc67c845c"} Feb 27 09:04:11 crc kubenswrapper[4906]: I0227 09:04:11.557797 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" podStartSLOduration=2.844244454 podStartE2EDuration="3.557773933s" podCreationTimestamp="2026-02-27 09:04:08 +0000 UTC" firstStartedPulling="2026-02-27 09:04:09.624485671 +0000 UTC m=+2148.018887281" lastFinishedPulling="2026-02-27 09:04:10.33801515 +0000 UTC m=+2148.732416760" observedRunningTime="2026-02-27 09:04:11.554083706 +0000 UTC m=+2149.948485306" watchObservedRunningTime="2026-02-27 09:04:11.557773933 +0000 UTC m=+2149.952175573" Feb 27 09:04:17 crc kubenswrapper[4906]: I0227 09:04:17.600143 4906 generic.go:334] "Generic (PLEG): container finished" 
podID="4ac686ba-92d6-4672-bd2e-936f1e9d15ba" containerID="7d0f5808eb2167b3eb77a5369d376a1d3baa49883d24c8566ceae73cc67c845c" exitCode=0 Feb 27 09:04:17 crc kubenswrapper[4906]: I0227 09:04:17.600236 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" event={"ID":"4ac686ba-92d6-4672-bd2e-936f1e9d15ba","Type":"ContainerDied","Data":"7d0f5808eb2167b3eb77a5369d376a1d3baa49883d24c8566ceae73cc67c845c"} Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.067438 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.175803 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-inventory-0\") pod \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.175917 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-ssh-key-openstack-edpm-ipam\") pod \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.176102 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcmln\" (UniqueName: \"kubernetes.io/projected/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-kube-api-access-tcmln\") pod \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\" (UID: \"4ac686ba-92d6-4672-bd2e-936f1e9d15ba\") " Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.184732 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-kube-api-access-tcmln" (OuterVolumeSpecName: "kube-api-access-tcmln") pod "4ac686ba-92d6-4672-bd2e-936f1e9d15ba" (UID: "4ac686ba-92d6-4672-bd2e-936f1e9d15ba"). InnerVolumeSpecName "kube-api-access-tcmln". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.208621 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "4ac686ba-92d6-4672-bd2e-936f1e9d15ba" (UID: "4ac686ba-92d6-4672-bd2e-936f1e9d15ba"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.210474 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "4ac686ba-92d6-4672-bd2e-936f1e9d15ba" (UID: "4ac686ba-92d6-4672-bd2e-936f1e9d15ba"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.278835 4906 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-inventory-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.279024 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.279043 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcmln\" (UniqueName: \"kubernetes.io/projected/4ac686ba-92d6-4672-bd2e-936f1e9d15ba-kube-api-access-tcmln\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.625323 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" event={"ID":"4ac686ba-92d6-4672-bd2e-936f1e9d15ba","Type":"ContainerDied","Data":"1b7ee04515244b68494cfab117c143bba77788b6439d8a26dbfcac4964553b9d"} Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.625389 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1b7ee04515244b68494cfab117c143bba77788b6439d8a26dbfcac4964553b9d" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.625973 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-z2blf" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.711870 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t"] Feb 27 09:04:19 crc kubenswrapper[4906]: E0227 09:04:19.712463 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ac686ba-92d6-4672-bd2e-936f1e9d15ba" containerName="ssh-known-hosts-edpm-deployment" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.712491 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ac686ba-92d6-4672-bd2e-936f1e9d15ba" containerName="ssh-known-hosts-edpm-deployment" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.712739 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ac686ba-92d6-4672-bd2e-936f1e9d15ba" containerName="ssh-known-hosts-edpm-deployment" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.713630 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.717116 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.717160 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.720298 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.725819 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.728365 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t"] Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.894530 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.895180 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.895242 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66qj4\" (UniqueName: \"kubernetes.io/projected/5f8dfe1f-95da-435f-aedf-7319de0cea38-kube-api-access-66qj4\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.997270 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.997417 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66qj4\" (UniqueName: \"kubernetes.io/projected/5f8dfe1f-95da-435f-aedf-7319de0cea38-kube-api-access-66qj4\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:19 crc kubenswrapper[4906]: I0227 09:04:19.997667 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-inventory\") pod 
\"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:20 crc kubenswrapper[4906]: I0227 09:04:20.002264 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-ssh-key-openstack-edpm-ipam\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:20 crc kubenswrapper[4906]: I0227 09:04:20.005616 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:20 crc kubenswrapper[4906]: I0227 09:04:20.014000 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66qj4\" (UniqueName: \"kubernetes.io/projected/5f8dfe1f-95da-435f-aedf-7319de0cea38-kube-api-access-66qj4\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-hzn5t\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:20 crc kubenswrapper[4906]: I0227 09:04:20.040357 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:20 crc kubenswrapper[4906]: I0227 09:04:20.593221 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t"] Feb 27 09:04:20 crc kubenswrapper[4906]: I0227 09:04:20.637537 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" event={"ID":"5f8dfe1f-95da-435f-aedf-7319de0cea38","Type":"ContainerStarted","Data":"06f91509e4dccae73b6674226f1f22e85973893f8667bd2e602aefcfc8098aec"} Feb 27 09:04:21 crc kubenswrapper[4906]: I0227 09:04:21.652084 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" event={"ID":"5f8dfe1f-95da-435f-aedf-7319de0cea38","Type":"ContainerStarted","Data":"5d291400d6ecb8c5d63ded9436bc90658ccd95247bbfea58325cb2fe473dfd88"} Feb 27 09:04:21 crc kubenswrapper[4906]: I0227 09:04:21.680874 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" podStartSLOduration=2.199267492 podStartE2EDuration="2.680827286s" podCreationTimestamp="2026-02-27 09:04:19 +0000 UTC" firstStartedPulling="2026-02-27 09:04:20.600404205 +0000 UTC m=+2158.994805815" lastFinishedPulling="2026-02-27 09:04:21.081963999 +0000 UTC m=+2159.476365609" observedRunningTime="2026-02-27 09:04:21.671619154 +0000 UTC m=+2160.066020764" watchObservedRunningTime="2026-02-27 09:04:21.680827286 +0000 UTC m=+2160.075228906" Feb 27 09:04:29 crc kubenswrapper[4906]: I0227 09:04:29.757326 4906 generic.go:334] "Generic (PLEG): container finished" podID="5f8dfe1f-95da-435f-aedf-7319de0cea38" containerID="5d291400d6ecb8c5d63ded9436bc90658ccd95247bbfea58325cb2fe473dfd88" exitCode=0 Feb 27 09:04:29 crc kubenswrapper[4906]: I0227 09:04:29.758211 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" event={"ID":"5f8dfe1f-95da-435f-aedf-7319de0cea38","Type":"ContainerDied","Data":"5d291400d6ecb8c5d63ded9436bc90658ccd95247bbfea58325cb2fe473dfd88"} Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.275191 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.382223 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-inventory\") pod \"5f8dfe1f-95da-435f-aedf-7319de0cea38\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.382281 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66qj4\" (UniqueName: \"kubernetes.io/projected/5f8dfe1f-95da-435f-aedf-7319de0cea38-kube-api-access-66qj4\") pod \"5f8dfe1f-95da-435f-aedf-7319de0cea38\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.382478 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-ssh-key-openstack-edpm-ipam\") pod \"5f8dfe1f-95da-435f-aedf-7319de0cea38\" (UID: \"5f8dfe1f-95da-435f-aedf-7319de0cea38\") " Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.389676 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f8dfe1f-95da-435f-aedf-7319de0cea38-kube-api-access-66qj4" (OuterVolumeSpecName: "kube-api-access-66qj4") pod "5f8dfe1f-95da-435f-aedf-7319de0cea38" (UID: "5f8dfe1f-95da-435f-aedf-7319de0cea38"). InnerVolumeSpecName "kube-api-access-66qj4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.415133 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-inventory" (OuterVolumeSpecName: "inventory") pod "5f8dfe1f-95da-435f-aedf-7319de0cea38" (UID: "5f8dfe1f-95da-435f-aedf-7319de0cea38"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.416828 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "5f8dfe1f-95da-435f-aedf-7319de0cea38" (UID: "5f8dfe1f-95da-435f-aedf-7319de0cea38"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.485533 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.485945 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66qj4\" (UniqueName: \"kubernetes.io/projected/5f8dfe1f-95da-435f-aedf-7319de0cea38-kube-api-access-66qj4\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.486017 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/5f8dfe1f-95da-435f-aedf-7319de0cea38-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.781928 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" event={"ID":"5f8dfe1f-95da-435f-aedf-7319de0cea38","Type":"ContainerDied","Data":"06f91509e4dccae73b6674226f1f22e85973893f8667bd2e602aefcfc8098aec"} Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.781984 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06f91509e4dccae73b6674226f1f22e85973893f8667bd2e602aefcfc8098aec" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.781987 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-hzn5t" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.889500 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s"] Feb 27 09:04:31 crc kubenswrapper[4906]: E0227 09:04:31.890045 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f8dfe1f-95da-435f-aedf-7319de0cea38" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.890070 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f8dfe1f-95da-435f-aedf-7319de0cea38" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.890256 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f8dfe1f-95da-435f-aedf-7319de0cea38" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.891188 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.900948 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.901214 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.901370 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.901541 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:04:31 crc kubenswrapper[4906]: I0227 09:04:31.908155 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s"] Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.003520 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d4g9s\" (UniqueName: \"kubernetes.io/projected/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-kube-api-access-d4g9s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.003639 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.003690 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.106765 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.107570 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d4g9s\" (UniqueName: \"kubernetes.io/projected/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-kube-api-access-d4g9s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.107650 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-ssh-key-openstack-edpm-ipam\") pod 
\"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.113823 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.115586 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-ssh-key-openstack-edpm-ipam\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.133271 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d4g9s\" (UniqueName: \"kubernetes.io/projected/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-kube-api-access-d4g9s\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.219600 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.766647 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s"] Feb 27 09:04:32 crc kubenswrapper[4906]: I0227 09:04:32.805699 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" event={"ID":"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d","Type":"ContainerStarted","Data":"057d987446b5c2f1e07c7180ba0b0605253ed934f1fc0f8f5b546dac825803be"} Feb 27 09:04:34 crc kubenswrapper[4906]: I0227 09:04:34.826313 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" event={"ID":"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d","Type":"ContainerStarted","Data":"755d3196f3b50134aee0a5ca8bb31d57db4df6ea1ac925260e8a3866527d30c8"} Feb 27 09:04:34 crc kubenswrapper[4906]: I0227 09:04:34.855755 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" podStartSLOduration=3.068439936 podStartE2EDuration="3.855724935s" podCreationTimestamp="2026-02-27 09:04:31 +0000 UTC" firstStartedPulling="2026-02-27 09:04:32.776136818 +0000 UTC m=+2171.170538438" lastFinishedPulling="2026-02-27 09:04:33.563421827 +0000 UTC m=+2171.957823437" observedRunningTime="2026-02-27 09:04:34.845459985 +0000 UTC m=+2173.239861595" watchObservedRunningTime="2026-02-27 09:04:34.855724935 +0000 UTC m=+2173.250126545" Feb 27 09:04:44 crc kubenswrapper[4906]: I0227 09:04:44.718778 4906 generic.go:334] "Generic (PLEG): container finished" podID="e27eec9c-ffe0-409a-95c6-ebbf293c7a7d" containerID="755d3196f3b50134aee0a5ca8bb31d57db4df6ea1ac925260e8a3866527d30c8" exitCode=0 Feb 27 09:04:44 crc kubenswrapper[4906]: I0227 09:04:44.718865 4906 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" event={"ID":"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d","Type":"ContainerDied","Data":"755d3196f3b50134aee0a5ca8bb31d57db4df6ea1ac925260e8a3866527d30c8"} Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.213042 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.313733 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4g9s\" (UniqueName: \"kubernetes.io/projected/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-kube-api-access-d4g9s\") pod \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.313999 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-ssh-key-openstack-edpm-ipam\") pod \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.314028 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-inventory\") pod \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\" (UID: \"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d\") " Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.321675 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-kube-api-access-d4g9s" (OuterVolumeSpecName: "kube-api-access-d4g9s") pod "e27eec9c-ffe0-409a-95c6-ebbf293c7a7d" (UID: "e27eec9c-ffe0-409a-95c6-ebbf293c7a7d"). InnerVolumeSpecName "kube-api-access-d4g9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.346047 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-inventory" (OuterVolumeSpecName: "inventory") pod "e27eec9c-ffe0-409a-95c6-ebbf293c7a7d" (UID: "e27eec9c-ffe0-409a-95c6-ebbf293c7a7d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.348939 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "e27eec9c-ffe0-409a-95c6-ebbf293c7a7d" (UID: "e27eec9c-ffe0-409a-95c6-ebbf293c7a7d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.417372 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4g9s\" (UniqueName: \"kubernetes.io/projected/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-kube-api-access-d4g9s\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.417419 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.417436 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27eec9c-ffe0-409a-95c6-ebbf293c7a7d-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.743235 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" event={"ID":"e27eec9c-ffe0-409a-95c6-ebbf293c7a7d","Type":"ContainerDied","Data":"057d987446b5c2f1e07c7180ba0b0605253ed934f1fc0f8f5b546dac825803be"} Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.743681 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="057d987446b5c2f1e07c7180ba0b0605253ed934f1fc0f8f5b546dac825803be" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.743405 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.914806 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4"] Feb 27 09:04:46 crc kubenswrapper[4906]: E0227 09:04:46.915281 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e27eec9c-ffe0-409a-95c6-ebbf293c7a7d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.915305 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e27eec9c-ffe0-409a-95c6-ebbf293c7a7d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.915501 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e27eec9c-ffe0-409a-95c6-ebbf293c7a7d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.916276 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.918872 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.919049 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.918955 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.919581 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.921320 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.921633 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.921912 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.927211 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Feb 27 09:04:46 crc kubenswrapper[4906]: I0227 09:04:46.931390 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4"] Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.031163 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.031282 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.031404 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.031513 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: 
\"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.031601 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.031718 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.031815 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.032000 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.032064 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mnjj\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-kube-api-access-9mnjj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.032173 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.032230 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-telemetry-default-certs-0\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.032321 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.032360 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.032415 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.135231 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136345 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136389 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136447 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136480 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136545 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136606 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mnjj\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-kube-api-access-9mnjj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136657 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136691 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136752 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136777 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136802 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136865 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.136902 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.142897 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.143929 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.144259 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.144389 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.145230 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: 
\"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.145407 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.146405 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.146439 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.146755 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.146912 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.148019 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ssh-key-openstack-edpm-ipam\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.150059 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.152625 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.169028 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mnjj\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-kube-api-access-9mnjj\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.236536 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:04:47 crc kubenswrapper[4906]: I0227 09:04:47.767072 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4"] Feb 27 09:04:48 crc kubenswrapper[4906]: I0227 09:04:48.761997 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" event={"ID":"cce4f075-8cb4-4f72-a590-56f3d507eebf","Type":"ContainerStarted","Data":"99530c95d3298f4006905cd729e363aac55f87a14f5229d9fc3aa1c14ffe4b50"} Feb 27 09:04:48 crc kubenswrapper[4906]: I0227 09:04:48.762351 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" event={"ID":"cce4f075-8cb4-4f72-a590-56f3d507eebf","Type":"ContainerStarted","Data":"8981bd149e64766a7fa9f02e5a73f4287c4950d70f163984e5e6739ab1de0b8c"} Feb 27 09:04:48 crc kubenswrapper[4906]: I0227 09:04:48.806074 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" podStartSLOduration=2.413469455 podStartE2EDuration="2.806035911s" podCreationTimestamp="2026-02-27 09:04:46 +0000 UTC" firstStartedPulling="2026-02-27 09:04:47.77051761 +0000 UTC m=+2186.164919220" lastFinishedPulling="2026-02-27 09:04:48.163084066 +0000 UTC m=+2186.557485676" observedRunningTime="2026-02-27 09:04:48.797018724 +0000 UTC m=+2187.191420344" watchObservedRunningTime="2026-02-27 09:04:48.806035911 +0000 UTC m=+2187.200437561" Feb 27 09:05:03 crc kubenswrapper[4906]: I0227 09:05:03.216175 4906 scope.go:117] "RemoveContainer" containerID="d18bb50f3f80b1cfd19397b9fb3700fecde5c046b31d589dbf17546c53ffc5a9" Feb 27 09:05:03 crc kubenswrapper[4906]: I0227 09:05:03.281893 4906 scope.go:117] "RemoveContainer" containerID="93a45b4f4f18ed9a95dc1f8381529a21fe75715a5698a5d841105c824970914d" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.557459 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-75zpk"] Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.585756 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.586840 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-75zpk"] Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.759790 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-utilities\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.759895 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5p6b\" (UniqueName: \"kubernetes.io/projected/06848b27-bb9f-472b-8a29-cb683684f6d1-kube-api-access-f5p6b\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.759958 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-catalog-content\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.862025 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-catalog-content\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.862331 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-utilities\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.862427 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5p6b\" (UniqueName: \"kubernetes.io/projected/06848b27-bb9f-472b-8a29-cb683684f6d1-kube-api-access-f5p6b\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.862717 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-catalog-content\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.863138 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-utilities\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.887478 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-f5p6b\" (UniqueName: \"kubernetes.io/projected/06848b27-bb9f-472b-8a29-cb683684f6d1-kube-api-access-f5p6b\") pod \"community-operators-75zpk\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:21 crc kubenswrapper[4906]: I0227 09:05:21.914414 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:22 crc kubenswrapper[4906]: I0227 09:05:22.465508 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-75zpk"] Feb 27 09:05:23 crc kubenswrapper[4906]: I0227 09:05:23.087765 4906 generic.go:334] "Generic (PLEG): container finished" podID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerID="6e1d3f2566549e2f983f9fd2c21b99c8fb56b26914a11da15cc3a52a9fe030e3" exitCode=0 Feb 27 09:05:23 crc kubenswrapper[4906]: I0227 09:05:23.087835 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75zpk" event={"ID":"06848b27-bb9f-472b-8a29-cb683684f6d1","Type":"ContainerDied","Data":"6e1d3f2566549e2f983f9fd2c21b99c8fb56b26914a11da15cc3a52a9fe030e3"} Feb 27 09:05:23 crc kubenswrapper[4906]: I0227 09:05:23.088080 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75zpk" event={"ID":"06848b27-bb9f-472b-8a29-cb683684f6d1","Type":"ContainerStarted","Data":"58b540cf2a7010bc58c4602ec4a80c4b56a3d02b98e10058ad547125268f012f"} Feb 27 09:05:24 crc kubenswrapper[4906]: I0227 09:05:24.845521 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:05:24 crc kubenswrapper[4906]: I0227 09:05:24.845985 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:05:25 crc kubenswrapper[4906]: I0227 09:05:25.132458 4906 generic.go:334] "Generic (PLEG): container finished" podID="cce4f075-8cb4-4f72-a590-56f3d507eebf" containerID="99530c95d3298f4006905cd729e363aac55f87a14f5229d9fc3aa1c14ffe4b50" exitCode=0 Feb 27 09:05:25 crc kubenswrapper[4906]: I0227 09:05:25.132517 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" event={"ID":"cce4f075-8cb4-4f72-a590-56f3d507eebf","Type":"ContainerDied","Data":"99530c95d3298f4006905cd729e363aac55f87a14f5229d9fc3aa1c14ffe4b50"} Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.150337 4906 generic.go:334] "Generic (PLEG): container finished" podID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerID="8c121c1bbf9d300f4f6925c3df2f02457e83de58be1291b8654b15989f722c5a" exitCode=0 Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.150403 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75zpk" event={"ID":"06848b27-bb9f-472b-8a29-cb683684f6d1","Type":"ContainerDied","Data":"8c121c1bbf9d300f4f6925c3df2f02457e83de58be1291b8654b15989f722c5a"} Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.570467 
4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660166 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660231 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-bootstrap-combined-ca-bundle\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660280 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-ovn-default-certs-0\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660301 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-neutron-metadata-combined-ca-bundle\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660401 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-nova-combined-ca-bundle\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660502 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mnjj\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-kube-api-access-9mnjj\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660551 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ovn-combined-ca-bundle\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660570 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-inventory\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660589 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-libvirt-combined-ca-bundle\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: 
\"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660611 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ssh-key-openstack-edpm-ipam\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660629 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-repo-setup-combined-ca-bundle\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660657 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660675 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-telemetry-combined-ca-bundle\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.660709 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"cce4f075-8cb4-4f72-a590-56f3d507eebf\" (UID: \"cce4f075-8cb4-4f72-a590-56f3d507eebf\") " Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.667919 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.668092 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.668804 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.668981 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.669134 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.669835 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.671063 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.671815 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.671977 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-kube-api-access-9mnjj" (OuterVolumeSpecName: "kube-api-access-9mnjj") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "kube-api-access-9mnjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.673023 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "libvirt-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.673078 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.674254 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.696191 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.701011 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-inventory" (OuterVolumeSpecName: "inventory") pod "cce4f075-8cb4-4f72-a590-56f3d507eebf" (UID: "cce4f075-8cb4-4f72-a590-56f3d507eebf"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.762691 4906 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763284 4906 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763397 4906 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763522 4906 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763605 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mnjj\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-kube-api-access-9mnjj\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763683 4906 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763764 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763871 4906 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.763980 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.764060 4906 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.764156 4906 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.764246 4906 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cce4f075-8cb4-4f72-a590-56f3d507eebf-telemetry-combined-ca-bundle\") on node \"crc\" 
DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.764378 4906 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:26 crc kubenswrapper[4906]: I0227 09:05:26.764468 4906 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/cce4f075-8cb4-4f72-a590-56f3d507eebf-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.170078 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" event={"ID":"cce4f075-8cb4-4f72-a590-56f3d507eebf","Type":"ContainerDied","Data":"8981bd149e64766a7fa9f02e5a73f4287c4950d70f163984e5e6739ab1de0b8c"} Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.170134 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8981bd149e64766a7fa9f02e5a73f4287c4950d70f163984e5e6739ab1de0b8c" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.170207 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.258400 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99"] Feb 27 09:05:27 crc kubenswrapper[4906]: E0227 09:05:27.258942 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cce4f075-8cb4-4f72-a590-56f3d507eebf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.258966 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="cce4f075-8cb4-4f72-a590-56f3d507eebf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.259285 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="cce4f075-8cb4-4f72-a590-56f3d507eebf" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.260174 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.262333 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.262691 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.262832 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.263495 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.264265 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.279286 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99"] Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.377901 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.377997 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dns2\" (UniqueName: \"kubernetes.io/projected/00a360db-bbc2-40f9-a12a-0b8af451cb3c-kube-api-access-9dns2\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.378131 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.378307 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.378413 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.480384 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.480501 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.480555 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dns2\" (UniqueName: \"kubernetes.io/projected/00a360db-bbc2-40f9-a12a-0b8af451cb3c-kube-api-access-9dns2\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.480573 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.480613 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.481653 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.485371 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.485715 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ssh-key-openstack-edpm-ipam\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.494688 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.497699 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dns2\" (UniqueName: \"kubernetes.io/projected/00a360db-bbc2-40f9-a12a-0b8af451cb3c-kube-api-access-9dns2\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-57m99\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:27 crc kubenswrapper[4906]: I0227 09:05:27.578905 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:05:28 crc kubenswrapper[4906]: I0227 09:05:28.570157 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99"] Feb 27 09:05:29 crc kubenswrapper[4906]: I0227 09:05:29.194293 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" event={"ID":"00a360db-bbc2-40f9-a12a-0b8af451cb3c","Type":"ContainerStarted","Data":"d2fcf76c18be7780672f34a7882dbfa26ec381bb56d47de81aee39d2376c8af0"} Feb 27 09:05:29 crc kubenswrapper[4906]: I0227 09:05:29.199473 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75zpk" event={"ID":"06848b27-bb9f-472b-8a29-cb683684f6d1","Type":"ContainerStarted","Data":"183b9c8796a2a4dc84d9a95787e9ff9f29c380915780323017a6449162019103"} Feb 27 09:05:30 crc kubenswrapper[4906]: I0227 09:05:30.211068 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" event={"ID":"00a360db-bbc2-40f9-a12a-0b8af451cb3c","Type":"ContainerStarted","Data":"06db34ac8152b5a94916ebe39bb294a41aa517d31291eceff64edbe0ff6284d6"} Feb 27 09:05:30 crc kubenswrapper[4906]: I0227 09:05:30.230345 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-75zpk" podStartSLOduration=4.316417464 podStartE2EDuration="9.230311514s" podCreationTimestamp="2026-02-27 09:05:21 +0000 UTC" firstStartedPulling="2026-02-27 09:05:23.089962316 +0000 UTC m=+2221.484363926" lastFinishedPulling="2026-02-27 09:05:28.003856366 +0000 UTC m=+2226.398257976" observedRunningTime="2026-02-27 09:05:29.228992343 +0000 UTC m=+2227.623393983" watchObservedRunningTime="2026-02-27 09:05:30.230311514 +0000 UTC m=+2228.624713124" Feb 27 09:05:30 crc kubenswrapper[4906]: I0227 09:05:30.239918 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" podStartSLOduration=2.354670557 podStartE2EDuration="3.239872864s" podCreationTimestamp="2026-02-27 09:05:27 +0000 UTC" firstStartedPulling="2026-02-27 09:05:28.569527994 +0000 UTC m=+2226.963929604" lastFinishedPulling="2026-02-27 09:05:29.454730301 +0000 UTC m=+2227.849131911" observedRunningTime="2026-02-27 09:05:30.228506247 +0000 UTC m=+2228.622907887" watchObservedRunningTime="2026-02-27 09:05:30.239872864 +0000 UTC m=+2228.634274474" Feb 27 09:05:31 crc kubenswrapper[4906]: I0227 09:05:31.915037 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:31 crc 
kubenswrapper[4906]: I0227 09:05:31.915365 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:31 crc kubenswrapper[4906]: I0227 09:05:31.966492 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:41 crc kubenswrapper[4906]: I0227 09:05:41.964608 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:45 crc kubenswrapper[4906]: I0227 09:05:45.329701 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-75zpk"] Feb 27 09:05:45 crc kubenswrapper[4906]: I0227 09:05:45.330331 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-75zpk" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="registry-server" containerID="cri-o://183b9c8796a2a4dc84d9a95787e9ff9f29c380915780323017a6449162019103" gracePeriod=2 Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.360442 4906 generic.go:334] "Generic (PLEG): container finished" podID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerID="183b9c8796a2a4dc84d9a95787e9ff9f29c380915780323017a6449162019103" exitCode=0 Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.360504 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75zpk" event={"ID":"06848b27-bb9f-472b-8a29-cb683684f6d1","Type":"ContainerDied","Data":"183b9c8796a2a4dc84d9a95787e9ff9f29c380915780323017a6449162019103"} Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.365897 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-75zpk" event={"ID":"06848b27-bb9f-472b-8a29-cb683684f6d1","Type":"ContainerDied","Data":"58b540cf2a7010bc58c4602ec4a80c4b56a3d02b98e10058ad547125268f012f"} Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.365927 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="58b540cf2a7010bc58c4602ec4a80c4b56a3d02b98e10058ad547125268f012f" Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.383618 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.564212 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-catalog-content\") pod \"06848b27-bb9f-472b-8a29-cb683684f6d1\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.564280 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-utilities\") pod \"06848b27-bb9f-472b-8a29-cb683684f6d1\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.564312 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5p6b\" (UniqueName: \"kubernetes.io/projected/06848b27-bb9f-472b-8a29-cb683684f6d1-kube-api-access-f5p6b\") pod \"06848b27-bb9f-472b-8a29-cb683684f6d1\" (UID: \"06848b27-bb9f-472b-8a29-cb683684f6d1\") " Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.565542 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-utilities" (OuterVolumeSpecName: "utilities") pod "06848b27-bb9f-472b-8a29-cb683684f6d1" (UID: "06848b27-bb9f-472b-8a29-cb683684f6d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.572848 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06848b27-bb9f-472b-8a29-cb683684f6d1-kube-api-access-f5p6b" (OuterVolumeSpecName: "kube-api-access-f5p6b") pod "06848b27-bb9f-472b-8a29-cb683684f6d1" (UID: "06848b27-bb9f-472b-8a29-cb683684f6d1"). InnerVolumeSpecName "kube-api-access-f5p6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.632282 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06848b27-bb9f-472b-8a29-cb683684f6d1" (UID: "06848b27-bb9f-472b-8a29-cb683684f6d1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.668082 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.668132 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06848b27-bb9f-472b-8a29-cb683684f6d1-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:46 crc kubenswrapper[4906]: I0227 09:05:46.668146 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5p6b\" (UniqueName: \"kubernetes.io/projected/06848b27-bb9f-472b-8a29-cb683684f6d1-kube-api-access-f5p6b\") on node \"crc\" DevicePath \"\"" Feb 27 09:05:47 crc kubenswrapper[4906]: I0227 09:05:47.376391 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-75zpk" Feb 27 09:05:47 crc kubenswrapper[4906]: I0227 09:05:47.421969 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-75zpk"] Feb 27 09:05:47 crc kubenswrapper[4906]: I0227 09:05:47.430424 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-75zpk"] Feb 27 09:05:48 crc kubenswrapper[4906]: I0227 09:05:48.564654 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" path="/var/lib/kubelet/pods/06848b27-bb9f-472b-8a29-cb683684f6d1/volumes" Feb 27 09:05:54 crc kubenswrapper[4906]: I0227 09:05:54.844281 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:05:54 crc kubenswrapper[4906]: I0227 09:05:54.844876 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.192250 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536386-p9dx2"] Feb 27 09:06:00 crc kubenswrapper[4906]: E0227 09:06:00.199534 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="extract-utilities" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.199560 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="extract-utilities" Feb 27 09:06:00 crc kubenswrapper[4906]: E0227 09:06:00.199599 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="extract-content" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.199606 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="extract-content" Feb 27 09:06:00 crc kubenswrapper[4906]: E0227 09:06:00.199628 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="registry-server" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.199635 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="registry-server" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.199973 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="06848b27-bb9f-472b-8a29-cb683684f6d1" containerName="registry-server" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.200854 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.245865 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.245950 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.246103 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.250206 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536386-p9dx2"] Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.256437 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtmlj\" (UniqueName: \"kubernetes.io/projected/134ce321-7d7a-4c13-a08b-26a893ba4473-kube-api-access-wtmlj\") pod \"auto-csr-approver-29536386-p9dx2\" (UID: \"134ce321-7d7a-4c13-a08b-26a893ba4473\") " pod="openshift-infra/auto-csr-approver-29536386-p9dx2" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.359048 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtmlj\" (UniqueName: \"kubernetes.io/projected/134ce321-7d7a-4c13-a08b-26a893ba4473-kube-api-access-wtmlj\") pod \"auto-csr-approver-29536386-p9dx2\" (UID: \"134ce321-7d7a-4c13-a08b-26a893ba4473\") " pod="openshift-infra/auto-csr-approver-29536386-p9dx2" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.388072 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtmlj\" (UniqueName: \"kubernetes.io/projected/134ce321-7d7a-4c13-a08b-26a893ba4473-kube-api-access-wtmlj\") pod \"auto-csr-approver-29536386-p9dx2\" (UID: \"134ce321-7d7a-4c13-a08b-26a893ba4473\") " pod="openshift-infra/auto-csr-approver-29536386-p9dx2" Feb 27 09:06:00 crc kubenswrapper[4906]: I0227 09:06:00.574442 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" Feb 27 09:06:01 crc kubenswrapper[4906]: I0227 09:06:01.062708 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536386-p9dx2"] Feb 27 09:06:01 crc kubenswrapper[4906]: I0227 09:06:01.526563 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" event={"ID":"134ce321-7d7a-4c13-a08b-26a893ba4473","Type":"ContainerStarted","Data":"b1bf833b3d5690a0972240d84b0f70d55f59e6872c1c5dade3b68d94cb7d1716"} Feb 27 09:06:02 crc kubenswrapper[4906]: I0227 09:06:02.542521 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" event={"ID":"134ce321-7d7a-4c13-a08b-26a893ba4473","Type":"ContainerStarted","Data":"a75bd4048de21da9f2b85f84da3e3c490b0dc176ba18ae0f87a75926bcd0591b"} Feb 27 09:06:02 crc kubenswrapper[4906]: I0227 09:06:02.565917 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" podStartSLOduration=1.657160741 podStartE2EDuration="2.565861302s" podCreationTimestamp="2026-02-27 09:06:00 +0000 UTC" firstStartedPulling="2026-02-27 09:06:01.07300514 +0000 UTC m=+2259.467406740" lastFinishedPulling="2026-02-27 09:06:01.981705691 +0000 UTC m=+2260.376107301" observedRunningTime="2026-02-27 09:06:02.562916005 +0000 UTC m=+2260.957317625" watchObservedRunningTime="2026-02-27 09:06:02.565861302 +0000 UTC m=+2260.960262912" Feb 27 09:06:03 crc kubenswrapper[4906]: I0227 09:06:03.554523 4906 generic.go:334] "Generic (PLEG): container finished" podID="134ce321-7d7a-4c13-a08b-26a893ba4473" containerID="a75bd4048de21da9f2b85f84da3e3c490b0dc176ba18ae0f87a75926bcd0591b" exitCode=0 Feb 27 09:06:03 crc kubenswrapper[4906]: I0227 09:06:03.555234 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" event={"ID":"134ce321-7d7a-4c13-a08b-26a893ba4473","Type":"ContainerDied","Data":"a75bd4048de21da9f2b85f84da3e3c490b0dc176ba18ae0f87a75926bcd0591b"} Feb 27 09:06:04 crc kubenswrapper[4906]: I0227 09:06:04.974639 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.062921 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtmlj\" (UniqueName: \"kubernetes.io/projected/134ce321-7d7a-4c13-a08b-26a893ba4473-kube-api-access-wtmlj\") pod \"134ce321-7d7a-4c13-a08b-26a893ba4473\" (UID: \"134ce321-7d7a-4c13-a08b-26a893ba4473\") " Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.071959 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/134ce321-7d7a-4c13-a08b-26a893ba4473-kube-api-access-wtmlj" (OuterVolumeSpecName: "kube-api-access-wtmlj") pod "134ce321-7d7a-4c13-a08b-26a893ba4473" (UID: "134ce321-7d7a-4c13-a08b-26a893ba4473"). InnerVolumeSpecName "kube-api-access-wtmlj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.165567 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtmlj\" (UniqueName: \"kubernetes.io/projected/134ce321-7d7a-4c13-a08b-26a893ba4473-kube-api-access-wtmlj\") on node \"crc\" DevicePath \"\"" Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.576305 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" event={"ID":"134ce321-7d7a-4c13-a08b-26a893ba4473","Type":"ContainerDied","Data":"b1bf833b3d5690a0972240d84b0f70d55f59e6872c1c5dade3b68d94cb7d1716"} Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.576351 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b1bf833b3d5690a0972240d84b0f70d55f59e6872c1c5dade3b68d94cb7d1716" Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.576370 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536386-p9dx2" Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.632977 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536380-lvt5k"] Feb 27 09:06:05 crc kubenswrapper[4906]: I0227 09:06:05.641552 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536380-lvt5k"] Feb 27 09:06:06 crc kubenswrapper[4906]: I0227 09:06:06.563913 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="234a230d-4b25-4208-814b-9c883c68f128" path="/var/lib/kubelet/pods/234a230d-4b25-4208-814b-9c883c68f128/volumes" Feb 27 09:06:24 crc kubenswrapper[4906]: I0227 09:06:24.844530 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:06:24 crc kubenswrapper[4906]: I0227 09:06:24.845300 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:06:24 crc kubenswrapper[4906]: I0227 09:06:24.845427 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 09:06:24 crc kubenswrapper[4906]: I0227 09:06:24.846939 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9e0140582f9bf5221401dd77c11465d92b2dac3c13999181c7540eb7eab49661"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 09:06:24 crc kubenswrapper[4906]: I0227 09:06:24.847061 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://9e0140582f9bf5221401dd77c11465d92b2dac3c13999181c7540eb7eab49661" gracePeriod=600 Feb 27 09:06:25 crc kubenswrapper[4906]: I0227 09:06:25.796449 4906 generic.go:334] "Generic 
(PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="9e0140582f9bf5221401dd77c11465d92b2dac3c13999181c7540eb7eab49661" exitCode=0 Feb 27 09:06:25 crc kubenswrapper[4906]: I0227 09:06:25.796471 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"9e0140582f9bf5221401dd77c11465d92b2dac3c13999181c7540eb7eab49661"} Feb 27 09:06:25 crc kubenswrapper[4906]: I0227 09:06:25.796783 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26"} Feb 27 09:06:25 crc kubenswrapper[4906]: I0227 09:06:25.796823 4906 scope.go:117] "RemoveContainer" containerID="a76d97b4c7fba2d9540f1c777c1e29b121429f999a33a981cd4d0d42250c8b13" Feb 27 09:06:32 crc kubenswrapper[4906]: I0227 09:06:32.873801 4906 generic.go:334] "Generic (PLEG): container finished" podID="00a360db-bbc2-40f9-a12a-0b8af451cb3c" containerID="06db34ac8152b5a94916ebe39bb294a41aa517d31291eceff64edbe0ff6284d6" exitCode=0 Feb 27 09:06:32 crc kubenswrapper[4906]: I0227 09:06:32.873920 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" event={"ID":"00a360db-bbc2-40f9-a12a-0b8af451cb3c","Type":"ContainerDied","Data":"06db34ac8152b5a94916ebe39bb294a41aa517d31291eceff64edbe0ff6284d6"} Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.480501 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.527228 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dns2\" (UniqueName: \"kubernetes.io/projected/00a360db-bbc2-40f9-a12a-0b8af451cb3c-kube-api-access-9dns2\") pod \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.527396 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ssh-key-openstack-edpm-ipam\") pod \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.527430 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovn-combined-ca-bundle\") pod \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.527592 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-inventory\") pod \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.527644 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovncontroller-config-0\") pod 
\"00a360db-bbc2-40f9-a12a-0b8af451cb3c\" (UID: \"00a360db-bbc2-40f9-a12a-0b8af451cb3c\") " Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.533311 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "00a360db-bbc2-40f9-a12a-0b8af451cb3c" (UID: "00a360db-bbc2-40f9-a12a-0b8af451cb3c"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.533576 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00a360db-bbc2-40f9-a12a-0b8af451cb3c-kube-api-access-9dns2" (OuterVolumeSpecName: "kube-api-access-9dns2") pod "00a360db-bbc2-40f9-a12a-0b8af451cb3c" (UID: "00a360db-bbc2-40f9-a12a-0b8af451cb3c"). InnerVolumeSpecName "kube-api-access-9dns2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.556036 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "00a360db-bbc2-40f9-a12a-0b8af451cb3c" (UID: "00a360db-bbc2-40f9-a12a-0b8af451cb3c"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.557449 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "00a360db-bbc2-40f9-a12a-0b8af451cb3c" (UID: "00a360db-bbc2-40f9-a12a-0b8af451cb3c"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.575836 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-inventory" (OuterVolumeSpecName: "inventory") pod "00a360db-bbc2-40f9-a12a-0b8af451cb3c" (UID: "00a360db-bbc2-40f9-a12a-0b8af451cb3c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.631071 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dns2\" (UniqueName: \"kubernetes.io/projected/00a360db-bbc2-40f9-a12a-0b8af451cb3c-kube-api-access-9dns2\") on node \"crc\" DevicePath \"\"" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.631110 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.631124 4906 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.631138 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/00a360db-bbc2-40f9-a12a-0b8af451cb3c-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.631151 4906 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/00a360db-bbc2-40f9-a12a-0b8af451cb3c-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.897833 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" event={"ID":"00a360db-bbc2-40f9-a12a-0b8af451cb3c","Type":"ContainerDied","Data":"d2fcf76c18be7780672f34a7882dbfa26ec381bb56d47de81aee39d2376c8af0"} Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.897903 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d2fcf76c18be7780672f34a7882dbfa26ec381bb56d47de81aee39d2376c8af0" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.898809 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-57m99" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.997662 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd"] Feb 27 09:06:34 crc kubenswrapper[4906]: E0227 09:06:34.998540 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134ce321-7d7a-4c13-a08b-26a893ba4473" containerName="oc" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.998558 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="134ce321-7d7a-4c13-a08b-26a893ba4473" containerName="oc" Feb 27 09:06:34 crc kubenswrapper[4906]: E0227 09:06:34.998626 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00a360db-bbc2-40f9-a12a-0b8af451cb3c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.998635 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="00a360db-bbc2-40f9-a12a-0b8af451cb3c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.998862 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="00a360db-bbc2-40f9-a12a-0b8af451cb3c" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.998918 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="134ce321-7d7a-4c13-a08b-26a893ba4473" containerName="oc" Feb 27 09:06:34 crc kubenswrapper[4906]: I0227 09:06:34.999784 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.006599 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.007046 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.007135 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.007419 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.007674 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.007797 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.011147 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd"] Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.039264 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.039624 4906 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.039905 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j9vs5\" (UniqueName: \"kubernetes.io/projected/2306aef3-5469-438c-a3fb-0a0b987c7372-kube-api-access-j9vs5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.040074 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.040129 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.040298 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.142006 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.142097 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.142128 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: 
\"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.142173 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j9vs5\" (UniqueName: \"kubernetes.io/projected/2306aef3-5469-438c-a3fb-0a0b987c7372-kube-api-access-j9vs5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.142220 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.142246 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.147282 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-ssh-key-openstack-edpm-ipam\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.147474 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.148540 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.148572 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.150531 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.165557 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j9vs5\" (UniqueName: \"kubernetes.io/projected/2306aef3-5469-438c-a3fb-0a0b987c7372-kube-api-access-j9vs5\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.318627 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:06:35 crc kubenswrapper[4906]: I0227 09:06:35.992571 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd"] Feb 27 09:06:36 crc kubenswrapper[4906]: I0227 09:06:36.916704 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" event={"ID":"2306aef3-5469-438c-a3fb-0a0b987c7372","Type":"ContainerStarted","Data":"ff22337d789b295b3b0429409a7b6a17b8a67907690482e50cb3abc4999b9351"} Feb 27 09:06:36 crc kubenswrapper[4906]: I0227 09:06:36.917055 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" event={"ID":"2306aef3-5469-438c-a3fb-0a0b987c7372","Type":"ContainerStarted","Data":"7acd5c6e435448a859a221e6b144d4fb4562fa111d089d250a10f27900848217"} Feb 27 09:06:36 crc kubenswrapper[4906]: I0227 09:06:36.942386 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" podStartSLOduration=2.454380784 podStartE2EDuration="2.942367782s" podCreationTimestamp="2026-02-27 09:06:34 +0000 UTC" firstStartedPulling="2026-02-27 09:06:35.996341387 +0000 UTC m=+2294.390742987" lastFinishedPulling="2026-02-27 09:06:36.484328345 +0000 UTC m=+2294.878729985" observedRunningTime="2026-02-27 09:06:36.93158003 +0000 UTC m=+2295.325981660" watchObservedRunningTime="2026-02-27 09:06:36.942367782 +0000 UTC m=+2295.336769392" Feb 27 09:07:03 crc kubenswrapper[4906]: I0227 09:07:03.425160 4906 scope.go:117] "RemoveContainer" containerID="5d02da8cfa5ebbc27ff6e76a1e4c21a95fc5d6799dfe41f360cb1fdc8e0b1387" Feb 27 09:07:24 crc kubenswrapper[4906]: I0227 09:07:24.414514 4906 generic.go:334] "Generic (PLEG): container finished" podID="2306aef3-5469-438c-a3fb-0a0b987c7372" containerID="ff22337d789b295b3b0429409a7b6a17b8a67907690482e50cb3abc4999b9351" exitCode=0 Feb 27 09:07:24 crc kubenswrapper[4906]: I0227 09:07:24.414624 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" event={"ID":"2306aef3-5469-438c-a3fb-0a0b987c7372","Type":"ContainerDied","Data":"ff22337d789b295b3b0429409a7b6a17b8a67907690482e50cb3abc4999b9351"} Feb 27 09:07:25 crc 
kubenswrapper[4906]: I0227 09:07:25.874608 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.936939 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-inventory\") pod \"2306aef3-5469-438c-a3fb-0a0b987c7372\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.938138 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j9vs5\" (UniqueName: \"kubernetes.io/projected/2306aef3-5469-438c-a3fb-0a0b987c7372-kube-api-access-j9vs5\") pod \"2306aef3-5469-438c-a3fb-0a0b987c7372\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.938193 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-nova-metadata-neutron-config-0\") pod \"2306aef3-5469-438c-a3fb-0a0b987c7372\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.938255 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-metadata-combined-ca-bundle\") pod \"2306aef3-5469-438c-a3fb-0a0b987c7372\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.938357 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-ssh-key-openstack-edpm-ipam\") pod \"2306aef3-5469-438c-a3fb-0a0b987c7372\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.938376 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-ovn-metadata-agent-neutron-config-0\") pod \"2306aef3-5469-438c-a3fb-0a0b987c7372\" (UID: \"2306aef3-5469-438c-a3fb-0a0b987c7372\") " Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.949963 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "2306aef3-5469-438c-a3fb-0a0b987c7372" (UID: "2306aef3-5469-438c-a3fb-0a0b987c7372"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.955063 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2306aef3-5469-438c-a3fb-0a0b987c7372-kube-api-access-j9vs5" (OuterVolumeSpecName: "kube-api-access-j9vs5") pod "2306aef3-5469-438c-a3fb-0a0b987c7372" (UID: "2306aef3-5469-438c-a3fb-0a0b987c7372"). InnerVolumeSpecName "kube-api-access-j9vs5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.970515 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-inventory" (OuterVolumeSpecName: "inventory") pod "2306aef3-5469-438c-a3fb-0a0b987c7372" (UID: "2306aef3-5469-438c-a3fb-0a0b987c7372"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.977705 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "2306aef3-5469-438c-a3fb-0a0b987c7372" (UID: "2306aef3-5469-438c-a3fb-0a0b987c7372"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.985074 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "2306aef3-5469-438c-a3fb-0a0b987c7372" (UID: "2306aef3-5469-438c-a3fb-0a0b987c7372"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:07:25 crc kubenswrapper[4906]: I0227 09:07:25.990202 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "2306aef3-5469-438c-a3fb-0a0b987c7372" (UID: "2306aef3-5469-438c-a3fb-0a0b987c7372"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.040860 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j9vs5\" (UniqueName: \"kubernetes.io/projected/2306aef3-5469-438c-a3fb-0a0b987c7372-kube-api-access-j9vs5\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.040931 4906 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.040952 4906 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.040974 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.040995 4906 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.041015 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2306aef3-5469-438c-a3fb-0a0b987c7372-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.442044 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" event={"ID":"2306aef3-5469-438c-a3fb-0a0b987c7372","Type":"ContainerDied","Data":"7acd5c6e435448a859a221e6b144d4fb4562fa111d089d250a10f27900848217"} Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.442106 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7acd5c6e435448a859a221e6b144d4fb4562fa111d089d250a10f27900848217" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.442207 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.542328 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5"] Feb 27 09:07:26 crc kubenswrapper[4906]: E0227 09:07:26.543164 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2306aef3-5469-438c-a3fb-0a0b987c7372" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.543196 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2306aef3-5469-438c-a3fb-0a0b987c7372" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.543462 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="2306aef3-5469-438c-a3fb-0a0b987c7372" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.544596 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.548219 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.548422 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.548642 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.548860 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.549919 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.572307 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5"] Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.666717 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.666897 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bbpbj\" (UniqueName: \"kubernetes.io/projected/a0093a0e-b072-4131-a483-ffb3b8858f51-kube-api-access-bbpbj\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.666980 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " 
pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.667017 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.667044 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.769646 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.770127 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bbpbj\" (UniqueName: \"kubernetes.io/projected/a0093a0e-b072-4131-a483-ffb3b8858f51-kube-api-access-bbpbj\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.770201 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.770227 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.770250 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.774434 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-ssh-key-openstack-edpm-ipam\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: 
\"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.774472 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.774786 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.778337 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.789411 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bbpbj\" (UniqueName: \"kubernetes.io/projected/a0093a0e-b072-4131-a483-ffb3b8858f51-kube-api-access-bbpbj\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:26 crc kubenswrapper[4906]: I0227 09:07:26.872018 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:07:27 crc kubenswrapper[4906]: I0227 09:07:27.449569 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5"] Feb 27 09:07:28 crc kubenswrapper[4906]: I0227 09:07:28.470385 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" event={"ID":"a0093a0e-b072-4131-a483-ffb3b8858f51","Type":"ContainerStarted","Data":"2c1c7aab78333a557ef776c9559c88081acafd86d9f73de17b27ccd736e60789"} Feb 27 09:07:28 crc kubenswrapper[4906]: I0227 09:07:28.471352 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" event={"ID":"a0093a0e-b072-4131-a483-ffb3b8858f51","Type":"ContainerStarted","Data":"167bc0d3f2b047ac6a4b5cf8ba561d74a989bda47b8dbc008561961a6682beba"} Feb 27 09:07:28 crc kubenswrapper[4906]: I0227 09:07:28.512513 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" podStartSLOduration=2.066159587 podStartE2EDuration="2.512491178s" podCreationTimestamp="2026-02-27 09:07:26 +0000 UTC" firstStartedPulling="2026-02-27 09:07:27.460179395 +0000 UTC m=+2345.854580995" lastFinishedPulling="2026-02-27 09:07:27.906510986 +0000 UTC m=+2346.300912586" observedRunningTime="2026-02-27 09:07:28.506695827 +0000 UTC m=+2346.901097447" watchObservedRunningTime="2026-02-27 09:07:28.512491178 +0000 UTC m=+2346.906892808" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.753019 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fmkvl"] Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.755589 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.772523 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fmkvl"] Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.866353 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-catalog-content\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.866406 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25gsb\" (UniqueName: \"kubernetes.io/projected/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-kube-api-access-25gsb\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.866656 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-utilities\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.968345 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-catalog-content\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.968414 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25gsb\" (UniqueName: \"kubernetes.io/projected/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-kube-api-access-25gsb\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.968507 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-utilities\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.969115 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-catalog-content\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.969215 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-utilities\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:30 crc kubenswrapper[4906]: I0227 09:07:30.997400 4906 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-25gsb\" (UniqueName: \"kubernetes.io/projected/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-kube-api-access-25gsb\") pod \"certified-operators-fmkvl\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:31 crc kubenswrapper[4906]: I0227 09:07:31.078821 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:31 crc kubenswrapper[4906]: I0227 09:07:31.566833 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fmkvl"] Feb 27 09:07:31 crc kubenswrapper[4906]: W0227 09:07:31.568810 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8b4dfa32_8153_48f5_a9f1_ae99bdc35b4e.slice/crio-3b254fbdcff047ee93a8ee477df69b5396e971b118db4c787061625c385ffa30 WatchSource:0}: Error finding container 3b254fbdcff047ee93a8ee477df69b5396e971b118db4c787061625c385ffa30: Status 404 returned error can't find the container with id 3b254fbdcff047ee93a8ee477df69b5396e971b118db4c787061625c385ffa30 Feb 27 09:07:32 crc kubenswrapper[4906]: I0227 09:07:32.515404 4906 generic.go:334] "Generic (PLEG): container finished" podID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerID="2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776" exitCode=0 Feb 27 09:07:32 crc kubenswrapper[4906]: I0227 09:07:32.515673 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fmkvl" event={"ID":"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e","Type":"ContainerDied","Data":"2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776"} Feb 27 09:07:32 crc kubenswrapper[4906]: I0227 09:07:32.516098 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fmkvl" event={"ID":"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e","Type":"ContainerStarted","Data":"3b254fbdcff047ee93a8ee477df69b5396e971b118db4c787061625c385ffa30"} Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.525613 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kr6vq"] Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.528430 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.530301 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fmkvl" event={"ID":"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e","Type":"ContainerStarted","Data":"e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13"} Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.552165 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kr6vq"] Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.627753 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-utilities\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.627866 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-catalog-content\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.627998 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pttt\" (UniqueName: \"kubernetes.io/projected/9c054e4e-8c99-4ada-bea5-19eef2674662-kube-api-access-4pttt\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.729656 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-catalog-content\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.729803 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pttt\" (UniqueName: \"kubernetes.io/projected/9c054e4e-8c99-4ada-bea5-19eef2674662-kube-api-access-4pttt\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.730001 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-utilities\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.731088 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-catalog-content\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.731429 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-utilities\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.756117 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pttt\" (UniqueName: \"kubernetes.io/projected/9c054e4e-8c99-4ada-bea5-19eef2674662-kube-api-access-4pttt\") pod \"redhat-marketplace-kr6vq\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:33 crc kubenswrapper[4906]: I0227 09:07:33.847363 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:34 crc kubenswrapper[4906]: I0227 09:07:34.405691 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kr6vq"] Feb 27 09:07:34 crc kubenswrapper[4906]: W0227 09:07:34.409160 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c054e4e_8c99_4ada_bea5_19eef2674662.slice/crio-2212db5d793c64764b093fafdb99e286235e42f06d86797db26075c85c65049a WatchSource:0}: Error finding container 2212db5d793c64764b093fafdb99e286235e42f06d86797db26075c85c65049a: Status 404 returned error can't find the container with id 2212db5d793c64764b093fafdb99e286235e42f06d86797db26075c85c65049a Feb 27 09:07:34 crc kubenswrapper[4906]: I0227 09:07:34.545233 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kr6vq" event={"ID":"9c054e4e-8c99-4ada-bea5-19eef2674662","Type":"ContainerStarted","Data":"2212db5d793c64764b093fafdb99e286235e42f06d86797db26075c85c65049a"} Feb 27 09:07:34 crc kubenswrapper[4906]: I0227 09:07:34.549936 4906 generic.go:334] "Generic (PLEG): container finished" podID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerID="e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13" exitCode=0 Feb 27 09:07:34 crc kubenswrapper[4906]: I0227 09:07:34.550021 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fmkvl" event={"ID":"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e","Type":"ContainerDied","Data":"e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13"} Feb 27 09:07:35 crc kubenswrapper[4906]: I0227 09:07:35.562347 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fmkvl" event={"ID":"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e","Type":"ContainerStarted","Data":"e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a"} Feb 27 09:07:35 crc kubenswrapper[4906]: I0227 09:07:35.564455 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerID="941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df" exitCode=0 Feb 27 09:07:35 crc kubenswrapper[4906]: I0227 09:07:35.564512 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kr6vq" event={"ID":"9c054e4e-8c99-4ada-bea5-19eef2674662","Type":"ContainerDied","Data":"941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df"} Feb 27 09:07:35 crc kubenswrapper[4906]: I0227 09:07:35.596005 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fmkvl" podStartSLOduration=2.817251162 
podStartE2EDuration="5.595983169s" podCreationTimestamp="2026-02-27 09:07:30 +0000 UTC" firstStartedPulling="2026-02-27 09:07:32.517805949 +0000 UTC m=+2350.912207569" lastFinishedPulling="2026-02-27 09:07:35.296537966 +0000 UTC m=+2353.690939576" observedRunningTime="2026-02-27 09:07:35.585119565 +0000 UTC m=+2353.979521175" watchObservedRunningTime="2026-02-27 09:07:35.595983169 +0000 UTC m=+2353.990384779" Feb 27 09:07:36 crc kubenswrapper[4906]: I0227 09:07:36.578310 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerID="07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9" exitCode=0 Feb 27 09:07:36 crc kubenswrapper[4906]: I0227 09:07:36.578863 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kr6vq" event={"ID":"9c054e4e-8c99-4ada-bea5-19eef2674662","Type":"ContainerDied","Data":"07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9"} Feb 27 09:07:37 crc kubenswrapper[4906]: I0227 09:07:37.592790 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kr6vq" event={"ID":"9c054e4e-8c99-4ada-bea5-19eef2674662","Type":"ContainerStarted","Data":"c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6"} Feb 27 09:07:37 crc kubenswrapper[4906]: I0227 09:07:37.620788 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kr6vq" podStartSLOduration=3.173827204 podStartE2EDuration="4.620761167s" podCreationTimestamp="2026-02-27 09:07:33 +0000 UTC" firstStartedPulling="2026-02-27 09:07:35.566754395 +0000 UTC m=+2353.961156005" lastFinishedPulling="2026-02-27 09:07:37.013688368 +0000 UTC m=+2355.408089968" observedRunningTime="2026-02-27 09:07:37.613696233 +0000 UTC m=+2356.008097843" watchObservedRunningTime="2026-02-27 09:07:37.620761167 +0000 UTC m=+2356.015162777" Feb 27 09:07:41 crc kubenswrapper[4906]: I0227 09:07:41.079025 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:41 crc kubenswrapper[4906]: I0227 09:07:41.079386 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:41 crc kubenswrapper[4906]: I0227 09:07:41.139104 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:41 crc kubenswrapper[4906]: I0227 09:07:41.687285 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:42 crc kubenswrapper[4906]: I0227 09:07:42.718694 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fmkvl"] Feb 27 09:07:43 crc kubenswrapper[4906]: I0227 09:07:43.666212 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fmkvl" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="registry-server" containerID="cri-o://e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a" gracePeriod=2 Feb 27 09:07:43 crc kubenswrapper[4906]: I0227 09:07:43.848579 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:43 crc kubenswrapper[4906]: I0227 09:07:43.848660 4906 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:43 crc kubenswrapper[4906]: I0227 09:07:43.918767 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.156265 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.268017 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-utilities\") pod \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.268181 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25gsb\" (UniqueName: \"kubernetes.io/projected/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-kube-api-access-25gsb\") pod \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.268309 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-catalog-content\") pod \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\" (UID: \"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e\") " Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.269561 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-utilities" (OuterVolumeSpecName: "utilities") pod "8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" (UID: "8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.279107 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-kube-api-access-25gsb" (OuterVolumeSpecName: "kube-api-access-25gsb") pod "8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" (UID: "8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e"). InnerVolumeSpecName "kube-api-access-25gsb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.370803 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.370830 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25gsb\" (UniqueName: \"kubernetes.io/projected/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-kube-api-access-25gsb\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.678315 4906 generic.go:334] "Generic (PLEG): container finished" podID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerID="e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a" exitCode=0 Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.678398 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fmkvl" event={"ID":"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e","Type":"ContainerDied","Data":"e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a"} Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.678460 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fmkvl" event={"ID":"8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e","Type":"ContainerDied","Data":"3b254fbdcff047ee93a8ee477df69b5396e971b118db4c787061625c385ffa30"} Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.678455 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fmkvl" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.678480 4906 scope.go:117] "RemoveContainer" containerID="e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.714906 4906 scope.go:117] "RemoveContainer" containerID="e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.732005 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" (UID: "8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.755374 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.779616 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.781266 4906 scope.go:117] "RemoveContainer" containerID="2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.809855 4906 scope.go:117] "RemoveContainer" containerID="e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a" Feb 27 09:07:45 crc kubenswrapper[4906]: E0227 09:07:44.813361 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a\": container with ID starting with e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a not found: ID does not exist" containerID="e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.813420 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a"} err="failed to get container status \"e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a\": rpc error: code = NotFound desc = could not find container \"e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a\": container with ID starting with e55b5304a1d64d6c5afb8dbb53b31b0ce0ef318262a1c30db38daeecdcf12d1a not found: ID does not exist" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.813460 4906 scope.go:117] "RemoveContainer" containerID="e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13" Feb 27 09:07:45 crc kubenswrapper[4906]: E0227 09:07:44.814558 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13\": container with ID starting with e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13 not found: ID does not exist" containerID="e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.814613 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13"} err="failed to get container status \"e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13\": rpc error: code = NotFound desc = could not find container \"e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13\": container with ID starting with e07b2a2d545f3f4065b1c15f5ce4e85c3987ed3c9a57a05b6f99463d2e522c13 not found: ID does not exist" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.814651 4906 scope.go:117] "RemoveContainer" containerID="2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776" Feb 27 09:07:45 crc kubenswrapper[4906]: E0227 09:07:44.815225 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776\": container with ID starting with 2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776 not found: ID does not exist" containerID="2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:44.815290 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776"} err="failed to get container status \"2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776\": rpc error: code = NotFound desc = could not find container \"2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776\": container with ID starting with 2cc98a53d161f6e9ac887296a3b615e5d4d4b991eb64fe62f5c92d66615d7776 not found: ID does not exist" Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:45.027018 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fmkvl"] Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:45.041008 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fmkvl"] Feb 27 09:07:45 crc kubenswrapper[4906]: I0227 09:07:45.920359 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kr6vq"] Feb 27 09:07:46 crc kubenswrapper[4906]: I0227 09:07:46.563562 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" path="/var/lib/kubelet/pods/8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e/volumes" Feb 27 09:07:47 crc kubenswrapper[4906]: I0227 09:07:47.716341 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kr6vq" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="registry-server" containerID="cri-o://c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6" gracePeriod=2 Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.203228 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.275844 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pttt\" (UniqueName: \"kubernetes.io/projected/9c054e4e-8c99-4ada-bea5-19eef2674662-kube-api-access-4pttt\") pod \"9c054e4e-8c99-4ada-bea5-19eef2674662\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.276072 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-catalog-content\") pod \"9c054e4e-8c99-4ada-bea5-19eef2674662\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.276222 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-utilities\") pod \"9c054e4e-8c99-4ada-bea5-19eef2674662\" (UID: \"9c054e4e-8c99-4ada-bea5-19eef2674662\") " Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.277292 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-utilities" (OuterVolumeSpecName: "utilities") pod "9c054e4e-8c99-4ada-bea5-19eef2674662" (UID: "9c054e4e-8c99-4ada-bea5-19eef2674662"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.283787 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c054e4e-8c99-4ada-bea5-19eef2674662-kube-api-access-4pttt" (OuterVolumeSpecName: "kube-api-access-4pttt") pod "9c054e4e-8c99-4ada-bea5-19eef2674662" (UID: "9c054e4e-8c99-4ada-bea5-19eef2674662"). InnerVolumeSpecName "kube-api-access-4pttt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.305532 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c054e4e-8c99-4ada-bea5-19eef2674662" (UID: "9c054e4e-8c99-4ada-bea5-19eef2674662"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.380052 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pttt\" (UniqueName: \"kubernetes.io/projected/9c054e4e-8c99-4ada-bea5-19eef2674662-kube-api-access-4pttt\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.380107 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.380123 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c054e4e-8c99-4ada-bea5-19eef2674662-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.729530 4906 generic.go:334] "Generic (PLEG): container finished" podID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerID="c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6" exitCode=0 Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.729630 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kr6vq" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.732018 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kr6vq" event={"ID":"9c054e4e-8c99-4ada-bea5-19eef2674662","Type":"ContainerDied","Data":"c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6"} Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.732088 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kr6vq" event={"ID":"9c054e4e-8c99-4ada-bea5-19eef2674662","Type":"ContainerDied","Data":"2212db5d793c64764b093fafdb99e286235e42f06d86797db26075c85c65049a"} Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.732114 4906 scope.go:117] "RemoveContainer" containerID="c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.764761 4906 scope.go:117] "RemoveContainer" containerID="07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.771119 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kr6vq"] Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.786339 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kr6vq"] Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.792426 4906 scope.go:117] "RemoveContainer" containerID="941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.841565 4906 scope.go:117] "RemoveContainer" containerID="c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6" Feb 27 09:07:48 crc kubenswrapper[4906]: E0227 09:07:48.843718 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6\": container with ID starting with c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6 not found: ID does not exist" containerID="c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.843784 4906 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6"} err="failed to get container status \"c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6\": rpc error: code = NotFound desc = could not find container \"c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6\": container with ID starting with c5062eb3aa5752d7a1f7801d8d65974683a5c7ab2e498e7af53d2ef465bb79d6 not found: ID does not exist" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.843825 4906 scope.go:117] "RemoveContainer" containerID="07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9" Feb 27 09:07:48 crc kubenswrapper[4906]: E0227 09:07:48.844424 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9\": container with ID starting with 07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9 not found: ID does not exist" containerID="07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.844448 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9"} err="failed to get container status \"07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9\": rpc error: code = NotFound desc = could not find container \"07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9\": container with ID starting with 07a9136d352cb843d8cd7b05fe266f030a20a1b1d14984305a2fc447cc842be9 not found: ID does not exist" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.844463 4906 scope.go:117] "RemoveContainer" containerID="941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df" Feb 27 09:07:48 crc kubenswrapper[4906]: E0227 09:07:48.844926 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df\": container with ID starting with 941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df not found: ID does not exist" containerID="941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df" Feb 27 09:07:48 crc kubenswrapper[4906]: I0227 09:07:48.844981 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df"} err="failed to get container status \"941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df\": rpc error: code = NotFound desc = could not find container \"941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df\": container with ID starting with 941e7ddf652fad3c47bbafbb119b87807916cfe6c72a1b8c8124734cda5b00df not found: ID does not exist" Feb 27 09:07:50 crc kubenswrapper[4906]: I0227 09:07:50.564591 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" path="/var/lib/kubelet/pods/9c054e4e-8c99-4ada-bea5-19eef2674662/volumes" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.175249 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536388-qp7tr"] Feb 27 09:08:00 crc kubenswrapper[4906]: E0227 09:08:00.178055 4906 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="registry-server" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.178127 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="registry-server" Feb 27 09:08:00 crc kubenswrapper[4906]: E0227 09:08:00.178153 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="extract-content" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.178163 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="extract-content" Feb 27 09:08:00 crc kubenswrapper[4906]: E0227 09:08:00.178214 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="registry-server" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.178224 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="registry-server" Feb 27 09:08:00 crc kubenswrapper[4906]: E0227 09:08:00.178246 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="extract-utilities" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.178255 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="extract-utilities" Feb 27 09:08:00 crc kubenswrapper[4906]: E0227 09:08:00.178297 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="extract-content" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.178306 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="extract-content" Feb 27 09:08:00 crc kubenswrapper[4906]: E0227 09:08:00.178336 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="extract-utilities" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.178344 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="extract-utilities" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.179138 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c054e4e-8c99-4ada-bea5-19eef2674662" containerName="registry-server" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.179194 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b4dfa32-8153-48f5-a9f1-ae99bdc35b4e" containerName="registry-server" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.181196 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.183643 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536388-qp7tr"] Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.188773 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.188827 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.189246 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.280266 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdf6s\" (UniqueName: \"kubernetes.io/projected/d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db-kube-api-access-sdf6s\") pod \"auto-csr-approver-29536388-qp7tr\" (UID: \"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db\") " pod="openshift-infra/auto-csr-approver-29536388-qp7tr" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.382277 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdf6s\" (UniqueName: \"kubernetes.io/projected/d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db-kube-api-access-sdf6s\") pod \"auto-csr-approver-29536388-qp7tr\" (UID: \"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db\") " pod="openshift-infra/auto-csr-approver-29536388-qp7tr" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.409120 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdf6s\" (UniqueName: \"kubernetes.io/projected/d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db-kube-api-access-sdf6s\") pod \"auto-csr-approver-29536388-qp7tr\" (UID: \"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db\") " pod="openshift-infra/auto-csr-approver-29536388-qp7tr" Feb 27 09:08:00 crc kubenswrapper[4906]: I0227 09:08:00.514129 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" Feb 27 09:08:01 crc kubenswrapper[4906]: I0227 09:08:00.999967 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536388-qp7tr"] Feb 27 09:08:01 crc kubenswrapper[4906]: I0227 09:08:01.877912 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" event={"ID":"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db","Type":"ContainerStarted","Data":"8826f1fef919e0337bd8da69069d4f6e5c9b1771e77e3e3450ece871c8cb07df"} Feb 27 09:08:02 crc kubenswrapper[4906]: I0227 09:08:02.888956 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" event={"ID":"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db","Type":"ContainerStarted","Data":"9598f356667d018348affe838a67a1466d2306cc873f1fe7692c3ff1389f183e"} Feb 27 09:08:02 crc kubenswrapper[4906]: I0227 09:08:02.909756 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" podStartSLOduration=1.622103438 podStartE2EDuration="2.909729749s" podCreationTimestamp="2026-02-27 09:08:00 +0000 UTC" firstStartedPulling="2026-02-27 09:08:01.002639475 +0000 UTC m=+2379.397041105" lastFinishedPulling="2026-02-27 09:08:02.290265816 +0000 UTC m=+2380.684667416" observedRunningTime="2026-02-27 09:08:02.900945079 +0000 UTC m=+2381.295346689" watchObservedRunningTime="2026-02-27 09:08:02.909729749 +0000 UTC m=+2381.304131359" Feb 27 09:08:03 crc kubenswrapper[4906]: I0227 09:08:03.902823 4906 generic.go:334] "Generic (PLEG): container finished" podID="d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db" containerID="9598f356667d018348affe838a67a1466d2306cc873f1fe7692c3ff1389f183e" exitCode=0 Feb 27 09:08:03 crc kubenswrapper[4906]: I0227 09:08:03.902917 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" event={"ID":"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db","Type":"ContainerDied","Data":"9598f356667d018348affe838a67a1466d2306cc873f1fe7692c3ff1389f183e"} Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.309761 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.388588 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdf6s\" (UniqueName: \"kubernetes.io/projected/d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db-kube-api-access-sdf6s\") pod \"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db\" (UID: \"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db\") " Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.398661 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db-kube-api-access-sdf6s" (OuterVolumeSpecName: "kube-api-access-sdf6s") pod "d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db" (UID: "d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db"). InnerVolumeSpecName "kube-api-access-sdf6s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.492061 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdf6s\" (UniqueName: \"kubernetes.io/projected/d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db-kube-api-access-sdf6s\") on node \"crc\" DevicePath \"\"" Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.652166 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536382-m9ncn"] Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.660364 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536382-m9ncn"] Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.928820 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" event={"ID":"d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db","Type":"ContainerDied","Data":"8826f1fef919e0337bd8da69069d4f6e5c9b1771e77e3e3450ece871c8cb07df"} Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.928869 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8826f1fef919e0337bd8da69069d4f6e5c9b1771e77e3e3450ece871c8cb07df" Feb 27 09:08:05 crc kubenswrapper[4906]: I0227 09:08:05.928980 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536388-qp7tr" Feb 27 09:08:06 crc kubenswrapper[4906]: I0227 09:08:06.562652 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="107ddf8b-5296-4d9c-92b7-c8ed0287c50c" path="/var/lib/kubelet/pods/107ddf8b-5296-4d9c-92b7-c8ed0287c50c/volumes" Feb 27 09:08:54 crc kubenswrapper[4906]: I0227 09:08:54.844413 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:08:54 crc kubenswrapper[4906]: I0227 09:08:54.845240 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:09:04 crc kubenswrapper[4906]: I0227 09:09:04.628386 4906 scope.go:117] "RemoveContainer" containerID="09caffd51bd53bec733f2c1ef08958a7cc4aefe1cc006598dee6cae5157c0aae" Feb 27 09:09:24 crc kubenswrapper[4906]: I0227 09:09:24.845085 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:09:24 crc kubenswrapper[4906]: I0227 09:09:24.846069 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:09:54 crc kubenswrapper[4906]: I0227 09:09:54.845242 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:09:54 crc kubenswrapper[4906]: I0227 09:09:54.846024 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:09:54 crc kubenswrapper[4906]: I0227 09:09:54.846088 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 09:09:54 crc kubenswrapper[4906]: I0227 09:09:54.847104 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 09:09:54 crc kubenswrapper[4906]: I0227 09:09:54.847174 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" gracePeriod=600 Feb 27 09:09:54 crc kubenswrapper[4906]: E0227 09:09:54.986122 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:09:55 crc kubenswrapper[4906]: I0227 09:09:55.094509 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" exitCode=0 Feb 27 09:09:55 crc kubenswrapper[4906]: I0227 09:09:55.094564 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26"} Feb 27 09:09:55 crc kubenswrapper[4906]: I0227 09:09:55.094612 4906 scope.go:117] "RemoveContainer" containerID="9e0140582f9bf5221401dd77c11465d92b2dac3c13999181c7540eb7eab49661" Feb 27 09:09:55 crc kubenswrapper[4906]: I0227 09:09:55.095511 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:09:55 crc kubenswrapper[4906]: E0227 09:09:55.095841 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" 
Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.161670 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536390-trhcv"] Feb 27 09:10:00 crc kubenswrapper[4906]: E0227 09:10:00.164624 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db" containerName="oc" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.164646 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db" containerName="oc" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.169639 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db" containerName="oc" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.176014 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536390-trhcv" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.184909 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.187664 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.188091 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.218444 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536390-trhcv"] Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.359968 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqkzg\" (UniqueName: \"kubernetes.io/projected/90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b-kube-api-access-tqkzg\") pod \"auto-csr-approver-29536390-trhcv\" (UID: \"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b\") " pod="openshift-infra/auto-csr-approver-29536390-trhcv" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.462594 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqkzg\" (UniqueName: \"kubernetes.io/projected/90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b-kube-api-access-tqkzg\") pod \"auto-csr-approver-29536390-trhcv\" (UID: \"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b\") " pod="openshift-infra/auto-csr-approver-29536390-trhcv" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.497927 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqkzg\" (UniqueName: \"kubernetes.io/projected/90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b-kube-api-access-tqkzg\") pod \"auto-csr-approver-29536390-trhcv\" (UID: \"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b\") " pod="openshift-infra/auto-csr-approver-29536390-trhcv" Feb 27 09:10:00 crc kubenswrapper[4906]: I0227 09:10:00.511172 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536390-trhcv" Feb 27 09:10:01 crc kubenswrapper[4906]: I0227 09:10:01.032455 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536390-trhcv"] Feb 27 09:10:01 crc kubenswrapper[4906]: I0227 09:10:01.041854 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 09:10:01 crc kubenswrapper[4906]: I0227 09:10:01.168894 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536390-trhcv" event={"ID":"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b","Type":"ContainerStarted","Data":"6a6ac404340db0fd5741fbe68a403d0d55fe61da74e8c16335172202148da3ea"} Feb 27 09:10:03 crc kubenswrapper[4906]: I0227 09:10:03.188756 4906 generic.go:334] "Generic (PLEG): container finished" podID="90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b" containerID="f17bd155d7ac0fb9528689230c402a875310975ed8a8fe5024044805ed3a277d" exitCode=0 Feb 27 09:10:03 crc kubenswrapper[4906]: I0227 09:10:03.188998 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536390-trhcv" event={"ID":"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b","Type":"ContainerDied","Data":"f17bd155d7ac0fb9528689230c402a875310975ed8a8fe5024044805ed3a277d"} Feb 27 09:10:04 crc kubenswrapper[4906]: I0227 09:10:04.605318 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536390-trhcv" Feb 27 09:10:04 crc kubenswrapper[4906]: I0227 09:10:04.767804 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqkzg\" (UniqueName: \"kubernetes.io/projected/90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b-kube-api-access-tqkzg\") pod \"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b\" (UID: \"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b\") " Feb 27 09:10:04 crc kubenswrapper[4906]: I0227 09:10:04.775155 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b-kube-api-access-tqkzg" (OuterVolumeSpecName: "kube-api-access-tqkzg") pod "90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b" (UID: "90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b"). InnerVolumeSpecName "kube-api-access-tqkzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:10:04 crc kubenswrapper[4906]: I0227 09:10:04.872041 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqkzg\" (UniqueName: \"kubernetes.io/projected/90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b-kube-api-access-tqkzg\") on node \"crc\" DevicePath \"\"" Feb 27 09:10:05 crc kubenswrapper[4906]: I0227 09:10:05.211344 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536390-trhcv" event={"ID":"90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b","Type":"ContainerDied","Data":"6a6ac404340db0fd5741fbe68a403d0d55fe61da74e8c16335172202148da3ea"} Feb 27 09:10:05 crc kubenswrapper[4906]: I0227 09:10:05.211401 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a6ac404340db0fd5741fbe68a403d0d55fe61da74e8c16335172202148da3ea" Feb 27 09:10:05 crc kubenswrapper[4906]: I0227 09:10:05.211484 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536390-trhcv" Feb 27 09:10:05 crc kubenswrapper[4906]: I0227 09:10:05.684141 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536384-k5pxk"] Feb 27 09:10:05 crc kubenswrapper[4906]: I0227 09:10:05.692540 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536384-k5pxk"] Feb 27 09:10:06 crc kubenswrapper[4906]: I0227 09:10:06.562731 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="204f0880-105e-4948-a07c-fd24a322842a" path="/var/lib/kubelet/pods/204f0880-105e-4948-a07c-fd24a322842a/volumes" Feb 27 09:10:09 crc kubenswrapper[4906]: I0227 09:10:09.551804 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:10:09 crc kubenswrapper[4906]: E0227 09:10:09.553516 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:10:21 crc kubenswrapper[4906]: I0227 09:10:21.552611 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:10:21 crc kubenswrapper[4906]: E0227 09:10:21.554596 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:10:33 crc kubenswrapper[4906]: I0227 09:10:33.552831 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:10:33 crc kubenswrapper[4906]: E0227 09:10:33.554549 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:10:47 crc kubenswrapper[4906]: I0227 09:10:47.553349 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:10:47 crc kubenswrapper[4906]: E0227 09:10:47.554596 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:11:00 crc kubenswrapper[4906]: I0227 09:11:00.552721 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 
09:11:00 crc kubenswrapper[4906]: E0227 09:11:00.553854 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:11:04 crc kubenswrapper[4906]: I0227 09:11:04.735467 4906 scope.go:117] "RemoveContainer" containerID="6f4c2d1fff4fc6e5c61386c92d056a75c750d0b032f9abe3e5faaea202f04c0b" Feb 27 09:11:14 crc kubenswrapper[4906]: I0227 09:11:14.556743 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:11:14 crc kubenswrapper[4906]: E0227 09:11:14.558112 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:11:22 crc kubenswrapper[4906]: I0227 09:11:22.981371 4906 generic.go:334] "Generic (PLEG): container finished" podID="a0093a0e-b072-4131-a483-ffb3b8858f51" containerID="2c1c7aab78333a557ef776c9559c88081acafd86d9f73de17b27ccd736e60789" exitCode=0 Feb 27 09:11:22 crc kubenswrapper[4906]: I0227 09:11:22.981535 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" event={"ID":"a0093a0e-b072-4131-a483-ffb3b8858f51","Type":"ContainerDied","Data":"2c1c7aab78333a557ef776c9559c88081acafd86d9f73de17b27ccd736e60789"} Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.433067 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.541510 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-inventory\") pod \"a0093a0e-b072-4131-a483-ffb3b8858f51\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.541752 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-combined-ca-bundle\") pod \"a0093a0e-b072-4131-a483-ffb3b8858f51\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.541832 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-secret-0\") pod \"a0093a0e-b072-4131-a483-ffb3b8858f51\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.541945 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bbpbj\" (UniqueName: \"kubernetes.io/projected/a0093a0e-b072-4131-a483-ffb3b8858f51-kube-api-access-bbpbj\") pod \"a0093a0e-b072-4131-a483-ffb3b8858f51\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.542116 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-ssh-key-openstack-edpm-ipam\") pod \"a0093a0e-b072-4131-a483-ffb3b8858f51\" (UID: \"a0093a0e-b072-4131-a483-ffb3b8858f51\") " Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.547492 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0093a0e-b072-4131-a483-ffb3b8858f51-kube-api-access-bbpbj" (OuterVolumeSpecName: "kube-api-access-bbpbj") pod "a0093a0e-b072-4131-a483-ffb3b8858f51" (UID: "a0093a0e-b072-4131-a483-ffb3b8858f51"). InnerVolumeSpecName "kube-api-access-bbpbj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.549014 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "a0093a0e-b072-4131-a483-ffb3b8858f51" (UID: "a0093a0e-b072-4131-a483-ffb3b8858f51"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.570408 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "a0093a0e-b072-4131-a483-ffb3b8858f51" (UID: "a0093a0e-b072-4131-a483-ffb3b8858f51"). InnerVolumeSpecName "libvirt-secret-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.575540 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-inventory" (OuterVolumeSpecName: "inventory") pod "a0093a0e-b072-4131-a483-ffb3b8858f51" (UID: "a0093a0e-b072-4131-a483-ffb3b8858f51"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.580133 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a0093a0e-b072-4131-a483-ffb3b8858f51" (UID: "a0093a0e-b072-4131-a483-ffb3b8858f51"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.645044 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bbpbj\" (UniqueName: \"kubernetes.io/projected/a0093a0e-b072-4131-a483-ffb3b8858f51-kube-api-access-bbpbj\") on node \"crc\" DevicePath \"\"" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.645080 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.645092 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.645102 4906 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:11:24 crc kubenswrapper[4906]: I0227 09:11:24.645112 4906 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/a0093a0e-b072-4131-a483-ffb3b8858f51-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.003732 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" event={"ID":"a0093a0e-b072-4131-a483-ffb3b8858f51","Type":"ContainerDied","Data":"167bc0d3f2b047ac6a4b5cf8ba561d74a989bda47b8dbc008561961a6682beba"} Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.003821 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="167bc0d3f2b047ac6a4b5cf8ba561d74a989bda47b8dbc008561961a6682beba" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.003824 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.110608 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z"] Feb 27 09:11:25 crc kubenswrapper[4906]: E0227 09:11:25.112509 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b" containerName="oc" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.112537 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b" containerName="oc" Feb 27 09:11:25 crc kubenswrapper[4906]: E0227 09:11:25.112569 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0093a0e-b072-4131-a483-ffb3b8858f51" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.112616 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0093a0e-b072-4131-a483-ffb3b8858f51" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.112831 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0093a0e-b072-4131-a483-ffb3b8858f51" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.112845 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b" containerName="oc" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.113569 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.117007 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.117067 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.117513 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.117823 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.118103 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.119554 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.120165 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.124744 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z"] Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259066 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " 
pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259122 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k728f\" (UniqueName: \"kubernetes.io/projected/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-kube-api-access-k728f\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259159 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259186 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-2\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259423 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259621 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-3\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259689 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259912 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.259963 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: 
\"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.260198 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.260320 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.361931 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362011 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-3\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362044 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362094 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362120 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362167 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362203 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362316 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362348 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k728f\" (UniqueName: \"kubernetes.io/projected/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-kube-api-access-k728f\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362379 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.362405 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-2\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.363494 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.367345 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-2\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.368142 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-3\" (UniqueName: 
\"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-3\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.369187 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.370269 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.370495 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.370539 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.371039 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.371816 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-ssh-key-openstack-edpm-ipam\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.372667 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.395417 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k728f\" (UniqueName: \"kubernetes.io/projected/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-kube-api-access-k728f\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-sqg9z\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:25 crc kubenswrapper[4906]: I0227 09:11:25.437100 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:11:26 crc kubenswrapper[4906]: I0227 09:11:26.013741 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z"] Feb 27 09:11:27 crc kubenswrapper[4906]: I0227 09:11:27.028022 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" event={"ID":"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d","Type":"ContainerStarted","Data":"3bae194c97ba5915930786c28d4c2c3dbfef04c35e006804c002be7c627c6681"} Feb 27 09:11:27 crc kubenswrapper[4906]: I0227 09:11:27.028360 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" event={"ID":"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d","Type":"ContainerStarted","Data":"052c9526f5613f4db232393420725f0a84d55ec6e8175f74cf222f79ec500373"} Feb 27 09:11:27 crc kubenswrapper[4906]: I0227 09:11:27.058470 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" podStartSLOduration=1.61197184 podStartE2EDuration="2.058446934s" podCreationTimestamp="2026-02-27 09:11:25 +0000 UTC" firstStartedPulling="2026-02-27 09:11:26.01256738 +0000 UTC m=+2584.406969030" lastFinishedPulling="2026-02-27 09:11:26.459042504 +0000 UTC m=+2584.853444124" observedRunningTime="2026-02-27 09:11:27.053514775 +0000 UTC m=+2585.447916395" watchObservedRunningTime="2026-02-27 09:11:27.058446934 +0000 UTC m=+2585.452848564" Feb 27 09:11:29 crc kubenswrapper[4906]: I0227 09:11:29.553723 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:11:29 crc kubenswrapper[4906]: E0227 09:11:29.554727 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:11:40 crc kubenswrapper[4906]: I0227 09:11:40.553244 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:11:40 crc kubenswrapper[4906]: E0227 09:11:40.554159 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:11:53 crc kubenswrapper[4906]: I0227 09:11:53.552526 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:11:53 crc kubenswrapper[4906]: E0227 09:11:53.553689 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.154197 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536392-9glgm"] Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.156751 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536392-9glgm" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.162734 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.163125 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.163832 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536392-9glgm"] Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.165963 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.186242 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps5sg\" (UniqueName: \"kubernetes.io/projected/098f1145-2bd1-40b6-9349-38409d6b89a9-kube-api-access-ps5sg\") pod \"auto-csr-approver-29536392-9glgm\" (UID: \"098f1145-2bd1-40b6-9349-38409d6b89a9\") " pod="openshift-infra/auto-csr-approver-29536392-9glgm" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.288414 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps5sg\" (UniqueName: \"kubernetes.io/projected/098f1145-2bd1-40b6-9349-38409d6b89a9-kube-api-access-ps5sg\") pod \"auto-csr-approver-29536392-9glgm\" (UID: \"098f1145-2bd1-40b6-9349-38409d6b89a9\") " pod="openshift-infra/auto-csr-approver-29536392-9glgm" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.309732 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps5sg\" (UniqueName: \"kubernetes.io/projected/098f1145-2bd1-40b6-9349-38409d6b89a9-kube-api-access-ps5sg\") pod \"auto-csr-approver-29536392-9glgm\" (UID: \"098f1145-2bd1-40b6-9349-38409d6b89a9\") " pod="openshift-infra/auto-csr-approver-29536392-9glgm" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.476520 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536392-9glgm" Feb 27 09:12:00 crc kubenswrapper[4906]: I0227 09:12:00.968133 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536392-9glgm"] Feb 27 09:12:01 crc kubenswrapper[4906]: I0227 09:12:01.355313 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536392-9glgm" event={"ID":"098f1145-2bd1-40b6-9349-38409d6b89a9","Type":"ContainerStarted","Data":"a906c86b746dd55e32812eeea3d8d083a7b4355d9ee1965a404b1c3468635215"} Feb 27 09:12:02 crc kubenswrapper[4906]: I0227 09:12:02.366832 4906 generic.go:334] "Generic (PLEG): container finished" podID="098f1145-2bd1-40b6-9349-38409d6b89a9" containerID="52d9da2720ab9eb9a68c8309980f87887bd456fe8cf7d7575333bbfb1a4452bb" exitCode=0 Feb 27 09:12:02 crc kubenswrapper[4906]: I0227 09:12:02.367014 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536392-9glgm" event={"ID":"098f1145-2bd1-40b6-9349-38409d6b89a9","Type":"ContainerDied","Data":"52d9da2720ab9eb9a68c8309980f87887bd456fe8cf7d7575333bbfb1a4452bb"} Feb 27 09:12:03 crc kubenswrapper[4906]: I0227 09:12:03.737422 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536392-9glgm" Feb 27 09:12:03 crc kubenswrapper[4906]: I0227 09:12:03.862035 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps5sg\" (UniqueName: \"kubernetes.io/projected/098f1145-2bd1-40b6-9349-38409d6b89a9-kube-api-access-ps5sg\") pod \"098f1145-2bd1-40b6-9349-38409d6b89a9\" (UID: \"098f1145-2bd1-40b6-9349-38409d6b89a9\") " Feb 27 09:12:03 crc kubenswrapper[4906]: I0227 09:12:03.867195 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/098f1145-2bd1-40b6-9349-38409d6b89a9-kube-api-access-ps5sg" (OuterVolumeSpecName: "kube-api-access-ps5sg") pod "098f1145-2bd1-40b6-9349-38409d6b89a9" (UID: "098f1145-2bd1-40b6-9349-38409d6b89a9"). InnerVolumeSpecName "kube-api-access-ps5sg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:12:03 crc kubenswrapper[4906]: I0227 09:12:03.965056 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps5sg\" (UniqueName: \"kubernetes.io/projected/098f1145-2bd1-40b6-9349-38409d6b89a9-kube-api-access-ps5sg\") on node \"crc\" DevicePath \"\"" Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.388793 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536392-9glgm" event={"ID":"098f1145-2bd1-40b6-9349-38409d6b89a9","Type":"ContainerDied","Data":"a906c86b746dd55e32812eeea3d8d083a7b4355d9ee1965a404b1c3468635215"} Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.389152 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a906c86b746dd55e32812eeea3d8d083a7b4355d9ee1965a404b1c3468635215" Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.388840 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536392-9glgm" Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.838481 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536386-p9dx2"] Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.848446 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536386-p9dx2"] Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.860845 4906 scope.go:117] "RemoveContainer" containerID="6e1d3f2566549e2f983f9fd2c21b99c8fb56b26914a11da15cc3a52a9fe030e3" Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.914413 4906 scope.go:117] "RemoveContainer" containerID="183b9c8796a2a4dc84d9a95787e9ff9f29c380915780323017a6449162019103" Feb 27 09:12:04 crc kubenswrapper[4906]: I0227 09:12:04.939623 4906 scope.go:117] "RemoveContainer" containerID="8c121c1bbf9d300f4f6925c3df2f02457e83de58be1291b8654b15989f722c5a" Feb 27 09:12:05 crc kubenswrapper[4906]: I0227 09:12:05.553525 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:12:05 crc kubenswrapper[4906]: E0227 09:12:05.554062 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:12:06 crc kubenswrapper[4906]: I0227 09:12:06.569521 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="134ce321-7d7a-4c13-a08b-26a893ba4473" path="/var/lib/kubelet/pods/134ce321-7d7a-4c13-a08b-26a893ba4473/volumes" Feb 27 09:12:19 crc kubenswrapper[4906]: I0227 09:12:19.552357 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:12:19 crc kubenswrapper[4906]: E0227 09:12:19.553488 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:12:32 crc kubenswrapper[4906]: I0227 09:12:32.562285 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:12:32 crc kubenswrapper[4906]: E0227 09:12:32.563134 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:12:44 crc kubenswrapper[4906]: I0227 09:12:44.552111 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:12:44 crc kubenswrapper[4906]: E0227 09:12:44.552858 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:12:56 crc kubenswrapper[4906]: I0227 09:12:56.552803 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:12:56 crc kubenswrapper[4906]: E0227 09:12:56.553538 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:13:04 crc kubenswrapper[4906]: I0227 09:13:04.991237 4906 scope.go:117] "RemoveContainer" containerID="a75bd4048de21da9f2b85f84da3e3c490b0dc176ba18ae0f87a75926bcd0591b" Feb 27 09:13:08 crc kubenswrapper[4906]: I0227 09:13:08.553636 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:13:08 crc kubenswrapper[4906]: E0227 09:13:08.554760 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:13:20 crc kubenswrapper[4906]: I0227 09:13:20.552919 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:13:20 crc kubenswrapper[4906]: E0227 09:13:20.555368 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:13:31 crc kubenswrapper[4906]: I0227 09:13:31.553241 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:13:31 crc kubenswrapper[4906]: E0227 09:13:31.554338 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:13:43 crc kubenswrapper[4906]: I0227 09:13:43.553006 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:13:43 crc kubenswrapper[4906]: E0227 09:13:43.554185 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:13:50 crc kubenswrapper[4906]: I0227 09:13:50.468582 4906 generic.go:334] "Generic (PLEG): container finished" podID="f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" containerID="3bae194c97ba5915930786c28d4c2c3dbfef04c35e006804c002be7c627c6681" exitCode=0 Feb 27 09:13:50 crc kubenswrapper[4906]: I0227 09:13:50.468676 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" event={"ID":"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d","Type":"ContainerDied","Data":"3bae194c97ba5915930786c28d4c2c3dbfef04c35e006804c002be7c627c6681"} Feb 27 09:13:51 crc kubenswrapper[4906]: I0227 09:13:51.982551 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.081205 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-ssh-key-openstack-edpm-ipam\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.081415 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-1\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.081503 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-3\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.081540 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-0\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.081576 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-0\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.082419 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-inventory\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.082644 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-combined-ca-bundle\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.082687 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-extra-config-0\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.082748 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-1\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.083152 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k728f\" (UniqueName: \"kubernetes.io/projected/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-kube-api-access-k728f\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.083208 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-2\") pod \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\" (UID: \"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d\") " Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.090047 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-kube-api-access-k728f" (OuterVolumeSpecName: "kube-api-access-k728f") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "kube-api-access-k728f". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.092148 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.113613 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.116327 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-3" (OuterVolumeSpecName: "nova-cell1-compute-config-3") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-cell1-compute-config-3". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.117425 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-inventory" (OuterVolumeSpecName: "inventory") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.120395 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.124250 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.125742 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.126278 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.131462 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.136799 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-2" (OuterVolumeSpecName: "nova-cell1-compute-config-2") pod "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" (UID: "f9df5a61-7fb3-4eb6-adc5-75d074d56a0d"). InnerVolumeSpecName "nova-cell1-compute-config-2". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188182 4906 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188237 4906 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188260 4906 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188282 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k728f\" (UniqueName: \"kubernetes.io/projected/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-kube-api-access-k728f\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188306 4906 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-2\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-2\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188325 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188344 4906 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188364 4906 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-3\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-3\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188386 4906 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188405 4906 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.188425 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f9df5a61-7fb3-4eb6-adc5-75d074d56a0d-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.488419 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" event={"ID":"f9df5a61-7fb3-4eb6-adc5-75d074d56a0d","Type":"ContainerDied","Data":"052c9526f5613f4db232393420725f0a84d55ec6e8175f74cf222f79ec500373"} Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 
09:13:52.488468 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="052c9526f5613f4db232393420725f0a84d55ec6e8175f74cf222f79ec500373" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.488484 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-sqg9z" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.622817 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf"] Feb 27 09:13:52 crc kubenswrapper[4906]: E0227 09:13:52.623409 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="098f1145-2bd1-40b6-9349-38409d6b89a9" containerName="oc" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.623429 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="098f1145-2bd1-40b6-9349-38409d6b89a9" containerName="oc" Feb 27 09:13:52 crc kubenswrapper[4906]: E0227 09:13:52.623451 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" containerName="nova-edpm-deployment-openstack-edpm-ipam" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.623458 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" containerName="nova-edpm-deployment-openstack-edpm-ipam" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.623712 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="098f1145-2bd1-40b6-9349-38409d6b89a9" containerName="oc" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.623740 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9df5a61-7fb3-4eb6-adc5-75d074d56a0d" containerName="nova-edpm-deployment-openstack-edpm-ipam" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.624774 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.630567 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-d466j" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.630800 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.630982 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.631176 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.631378 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.658355 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf"] Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.699325 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.699500 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.699555 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hzlxp\" (UniqueName: \"kubernetes.io/projected/a02ebde9-1894-4df1-a904-7d898d684871-kube-api-access-hzlxp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.699916 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.700037 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc 
kubenswrapper[4906]: I0227 09:13:52.700069 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.700606 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.803462 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.803582 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.803639 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.803660 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hzlxp\" (UniqueName: \"kubernetes.io/projected/a02ebde9-1894-4df1-a904-7d898d684871-kube-api-access-hzlxp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.803685 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.803715 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-0\") pod 
\"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.803737 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.809458 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.813259 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ssh-key-openstack-edpm-ipam\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.817613 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.819308 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.819519 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.819836 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.823991 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hzlxp\" (UniqueName: 
\"kubernetes.io/projected/a02ebde9-1894-4df1-a904-7d898d684871-kube-api-access-hzlxp\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-jkswf\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:52 crc kubenswrapper[4906]: I0227 09:13:52.949683 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:13:53 crc kubenswrapper[4906]: I0227 09:13:53.495896 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf"] Feb 27 09:13:54 crc kubenswrapper[4906]: I0227 09:13:54.513231 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" event={"ID":"a02ebde9-1894-4df1-a904-7d898d684871","Type":"ContainerStarted","Data":"146e2a629869da87eed2f84b00f28a6d1f0a4ef52d8609f2b83ff12f9caa97b6"} Feb 27 09:13:54 crc kubenswrapper[4906]: I0227 09:13:54.513744 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" event={"ID":"a02ebde9-1894-4df1-a904-7d898d684871","Type":"ContainerStarted","Data":"bb03b6d185b8936385904214c6746e7589583657090bbb7e0f84b34af00e6b35"} Feb 27 09:13:54 crc kubenswrapper[4906]: I0227 09:13:54.550039 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" podStartSLOduration=1.88025927 podStartE2EDuration="2.550015808s" podCreationTimestamp="2026-02-27 09:13:52 +0000 UTC" firstStartedPulling="2026-02-27 09:13:53.502031258 +0000 UTC m=+2731.896432868" lastFinishedPulling="2026-02-27 09:13:54.171781116 +0000 UTC m=+2732.566189406" observedRunningTime="2026-02-27 09:13:54.543656901 +0000 UTC m=+2732.938058551" watchObservedRunningTime="2026-02-27 09:13:54.550015808 +0000 UTC m=+2732.944417418" Feb 27 09:13:56 crc kubenswrapper[4906]: I0227 09:13:56.553087 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:13:56 crc kubenswrapper[4906]: E0227 09:13:56.553981 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.157409 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536394-p2kc4"] Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.159531 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536394-p2kc4" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.162089 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.163065 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.164329 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.181599 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536394-p2kc4"] Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.289133 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2p77\" (UniqueName: \"kubernetes.io/projected/2f43478c-4a21-41ef-a8c3-79580edca361-kube-api-access-v2p77\") pod \"auto-csr-approver-29536394-p2kc4\" (UID: \"2f43478c-4a21-41ef-a8c3-79580edca361\") " pod="openshift-infra/auto-csr-approver-29536394-p2kc4" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.391506 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2p77\" (UniqueName: \"kubernetes.io/projected/2f43478c-4a21-41ef-a8c3-79580edca361-kube-api-access-v2p77\") pod \"auto-csr-approver-29536394-p2kc4\" (UID: \"2f43478c-4a21-41ef-a8c3-79580edca361\") " pod="openshift-infra/auto-csr-approver-29536394-p2kc4" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.415411 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2p77\" (UniqueName: \"kubernetes.io/projected/2f43478c-4a21-41ef-a8c3-79580edca361-kube-api-access-v2p77\") pod \"auto-csr-approver-29536394-p2kc4\" (UID: \"2f43478c-4a21-41ef-a8c3-79580edca361\") " pod="openshift-infra/auto-csr-approver-29536394-p2kc4" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.487199 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536394-p2kc4" Feb 27 09:14:00 crc kubenswrapper[4906]: I0227 09:14:00.785702 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536394-p2kc4"] Feb 27 09:14:01 crc kubenswrapper[4906]: I0227 09:14:01.600372 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536394-p2kc4" event={"ID":"2f43478c-4a21-41ef-a8c3-79580edca361","Type":"ContainerStarted","Data":"5bd31441c7d76961ab9046b9ba9926db672ef8cb200ddd01a385ad050861e8ed"} Feb 27 09:14:02 crc kubenswrapper[4906]: I0227 09:14:02.617477 4906 generic.go:334] "Generic (PLEG): container finished" podID="2f43478c-4a21-41ef-a8c3-79580edca361" containerID="c3ffb47e11d18829f3862b745ead451846356fefd9446c9e6a59a5ec3730745e" exitCode=0 Feb 27 09:14:02 crc kubenswrapper[4906]: I0227 09:14:02.617659 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536394-p2kc4" event={"ID":"2f43478c-4a21-41ef-a8c3-79580edca361","Type":"ContainerDied","Data":"c3ffb47e11d18829f3862b745ead451846356fefd9446c9e6a59a5ec3730745e"} Feb 27 09:14:03 crc kubenswrapper[4906]: I0227 09:14:03.998326 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536394-p2kc4" Feb 27 09:14:04 crc kubenswrapper[4906]: I0227 09:14:04.086608 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v2p77\" (UniqueName: \"kubernetes.io/projected/2f43478c-4a21-41ef-a8c3-79580edca361-kube-api-access-v2p77\") pod \"2f43478c-4a21-41ef-a8c3-79580edca361\" (UID: \"2f43478c-4a21-41ef-a8c3-79580edca361\") " Feb 27 09:14:04 crc kubenswrapper[4906]: I0227 09:14:04.092708 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f43478c-4a21-41ef-a8c3-79580edca361-kube-api-access-v2p77" (OuterVolumeSpecName: "kube-api-access-v2p77") pod "2f43478c-4a21-41ef-a8c3-79580edca361" (UID: "2f43478c-4a21-41ef-a8c3-79580edca361"). InnerVolumeSpecName "kube-api-access-v2p77". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:14:04 crc kubenswrapper[4906]: I0227 09:14:04.189522 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v2p77\" (UniqueName: \"kubernetes.io/projected/2f43478c-4a21-41ef-a8c3-79580edca361-kube-api-access-v2p77\") on node \"crc\" DevicePath \"\"" Feb 27 09:14:04 crc kubenswrapper[4906]: I0227 09:14:04.640740 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536394-p2kc4" event={"ID":"2f43478c-4a21-41ef-a8c3-79580edca361","Type":"ContainerDied","Data":"5bd31441c7d76961ab9046b9ba9926db672ef8cb200ddd01a385ad050861e8ed"} Feb 27 09:14:04 crc kubenswrapper[4906]: I0227 09:14:04.640804 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bd31441c7d76961ab9046b9ba9926db672ef8cb200ddd01a385ad050861e8ed" Feb 27 09:14:04 crc kubenswrapper[4906]: I0227 09:14:04.640803 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536394-p2kc4" Feb 27 09:14:05 crc kubenswrapper[4906]: I0227 09:14:05.090306 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536388-qp7tr"] Feb 27 09:14:05 crc kubenswrapper[4906]: I0227 09:14:05.103309 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536388-qp7tr"] Feb 27 09:14:06 crc kubenswrapper[4906]: I0227 09:14:06.563550 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db" path="/var/lib/kubelet/pods/d5517b0c-b9f4-4cf1-bfc5-f747d1d3e2db/volumes" Feb 27 09:14:10 crc kubenswrapper[4906]: I0227 09:14:10.552793 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:14:10 crc kubenswrapper[4906]: E0227 09:14:10.554706 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:14:24 crc kubenswrapper[4906]: I0227 09:14:24.553473 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:14:24 crc kubenswrapper[4906]: E0227 09:14:24.554391 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:14:35 crc kubenswrapper[4906]: I0227 09:14:35.552408 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:14:35 crc kubenswrapper[4906]: E0227 09:14:35.553151 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.804292 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fm65z"] Feb 27 09:14:36 crc kubenswrapper[4906]: E0227 09:14:36.804745 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f43478c-4a21-41ef-a8c3-79580edca361" containerName="oc" Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.804760 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f43478c-4a21-41ef-a8c3-79580edca361" containerName="oc" Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.804996 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f43478c-4a21-41ef-a8c3-79580edca361" containerName="oc" Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.806336 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.816071 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fm65z"] Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.924834 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-utilities\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.924901 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csjd7\" (UniqueName: \"kubernetes.io/projected/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-kube-api-access-csjd7\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:36 crc kubenswrapper[4906]: I0227 09:14:36.925318 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-catalog-content\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.027690 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-utilities\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.027754 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csjd7\" (UniqueName: \"kubernetes.io/projected/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-kube-api-access-csjd7\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.027936 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-catalog-content\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.028370 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-utilities\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.028407 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-catalog-content\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.047528 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-csjd7\" (UniqueName: \"kubernetes.io/projected/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-kube-api-access-csjd7\") pod \"redhat-operators-fm65z\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.129335 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.620070 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fm65z"] Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.964596 4906 generic.go:334] "Generic (PLEG): container finished" podID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerID="f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1" exitCode=0 Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.964656 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fm65z" event={"ID":"4dff761f-d6d6-43a9-a737-5653d0e0cbb3","Type":"ContainerDied","Data":"f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1"} Feb 27 09:14:37 crc kubenswrapper[4906]: I0227 09:14:37.964958 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fm65z" event={"ID":"4dff761f-d6d6-43a9-a737-5653d0e0cbb3","Type":"ContainerStarted","Data":"9fdeb0651bd424ce145097ba67ac4eb05e759ffdcb26e6f0afc1fa335fecd28d"} Feb 27 09:14:38 crc kubenswrapper[4906]: I0227 09:14:38.976929 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fm65z" event={"ID":"4dff761f-d6d6-43a9-a737-5653d0e0cbb3","Type":"ContainerStarted","Data":"051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8"} Feb 27 09:14:39 crc kubenswrapper[4906]: I0227 09:14:39.992157 4906 generic.go:334] "Generic (PLEG): container finished" podID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerID="051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8" exitCode=0 Feb 27 09:14:39 crc kubenswrapper[4906]: I0227 09:14:39.992279 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fm65z" event={"ID":"4dff761f-d6d6-43a9-a737-5653d0e0cbb3","Type":"ContainerDied","Data":"051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8"} Feb 27 09:14:41 crc kubenswrapper[4906]: I0227 09:14:41.007278 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fm65z" event={"ID":"4dff761f-d6d6-43a9-a737-5653d0e0cbb3","Type":"ContainerStarted","Data":"9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753"} Feb 27 09:14:41 crc kubenswrapper[4906]: I0227 09:14:41.053264 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fm65z" podStartSLOduration=2.5961390939999998 podStartE2EDuration="5.053240487s" podCreationTimestamp="2026-02-27 09:14:36 +0000 UTC" firstStartedPulling="2026-02-27 09:14:37.966610476 +0000 UTC m=+2776.361012086" lastFinishedPulling="2026-02-27 09:14:40.423711869 +0000 UTC m=+2778.818113479" observedRunningTime="2026-02-27 09:14:41.046031408 +0000 UTC m=+2779.440433018" watchObservedRunningTime="2026-02-27 09:14:41.053240487 +0000 UTC m=+2779.447642097" Feb 27 09:14:47 crc kubenswrapper[4906]: I0227 09:14:47.130219 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fm65z" 
Feb 27 09:14:47 crc kubenswrapper[4906]: I0227 09:14:47.130815 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:47 crc kubenswrapper[4906]: I0227 09:14:47.177730 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:47 crc kubenswrapper[4906]: I0227 09:14:47.553235 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:14:47 crc kubenswrapper[4906]: E0227 09:14:47.553950 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:14:48 crc kubenswrapper[4906]: I0227 09:14:48.145975 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:48 crc kubenswrapper[4906]: I0227 09:14:48.200502 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fm65z"] Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.094015 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fm65z" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="registry-server" containerID="cri-o://9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753" gracePeriod=2 Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.619602 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.765204 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-csjd7\" (UniqueName: \"kubernetes.io/projected/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-kube-api-access-csjd7\") pod \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.765358 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-utilities\") pod \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.765444 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-catalog-content\") pod \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\" (UID: \"4dff761f-d6d6-43a9-a737-5653d0e0cbb3\") " Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.767255 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-utilities" (OuterVolumeSpecName: "utilities") pod "4dff761f-d6d6-43a9-a737-5653d0e0cbb3" (UID: "4dff761f-d6d6-43a9-a737-5653d0e0cbb3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.776948 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-kube-api-access-csjd7" (OuterVolumeSpecName: "kube-api-access-csjd7") pod "4dff761f-d6d6-43a9-a737-5653d0e0cbb3" (UID: "4dff761f-d6d6-43a9-a737-5653d0e0cbb3"). InnerVolumeSpecName "kube-api-access-csjd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.868627 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-csjd7\" (UniqueName: \"kubernetes.io/projected/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-kube-api-access-csjd7\") on node \"crc\" DevicePath \"\"" Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.870205 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.905353 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4dff761f-d6d6-43a9-a737-5653d0e0cbb3" (UID: "4dff761f-d6d6-43a9-a737-5653d0e0cbb3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:14:50 crc kubenswrapper[4906]: I0227 09:14:50.971987 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4dff761f-d6d6-43a9-a737-5653d0e0cbb3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.107294 4906 generic.go:334] "Generic (PLEG): container finished" podID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerID="9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753" exitCode=0 Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.107345 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fm65z" event={"ID":"4dff761f-d6d6-43a9-a737-5653d0e0cbb3","Type":"ContainerDied","Data":"9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753"} Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.107378 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fm65z" event={"ID":"4dff761f-d6d6-43a9-a737-5653d0e0cbb3","Type":"ContainerDied","Data":"9fdeb0651bd424ce145097ba67ac4eb05e759ffdcb26e6f0afc1fa335fecd28d"} Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.107374 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-fm65z" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.107398 4906 scope.go:117] "RemoveContainer" containerID="9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.129505 4906 scope.go:117] "RemoveContainer" containerID="051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.162057 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fm65z"] Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.166822 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fm65z"] Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.172834 4906 scope.go:117] "RemoveContainer" containerID="f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.199456 4906 scope.go:117] "RemoveContainer" containerID="9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753" Feb 27 09:14:51 crc kubenswrapper[4906]: E0227 09:14:51.200159 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753\": container with ID starting with 9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753 not found: ID does not exist" containerID="9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.200216 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753"} err="failed to get container status \"9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753\": rpc error: code = NotFound desc = could not find container \"9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753\": container with ID starting with 9957337ff3eef831f2ceb28f476e54249c87b9ab0219fb5c6fd9bb4c1d69f753 not found: ID does not exist" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.200256 4906 scope.go:117] "RemoveContainer" containerID="051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8" Feb 27 09:14:51 crc kubenswrapper[4906]: E0227 09:14:51.200692 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8\": container with ID starting with 051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8 not found: ID does not exist" containerID="051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.200749 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8"} err="failed to get container status \"051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8\": rpc error: code = NotFound desc = could not find container \"051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8\": container with ID starting with 051a24208034f8d1497e36f14491e3288c89c3e45dd459b02576f7ca76180be8 not found: ID does not exist" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.200776 4906 scope.go:117] "RemoveContainer" 
containerID="f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1" Feb 27 09:14:51 crc kubenswrapper[4906]: E0227 09:14:51.201367 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1\": container with ID starting with f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1 not found: ID does not exist" containerID="f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1" Feb 27 09:14:51 crc kubenswrapper[4906]: I0227 09:14:51.201468 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1"} err="failed to get container status \"f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1\": rpc error: code = NotFound desc = could not find container \"f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1\": container with ID starting with f13bf1a0e588c7dda4122a0a8c4e4f4b425ed15c9bb558e59098a500c77140e1 not found: ID does not exist" Feb 27 09:14:52 crc kubenswrapper[4906]: I0227 09:14:52.568028 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" path="/var/lib/kubelet/pods/4dff761f-d6d6-43a9-a737-5653d0e0cbb3/volumes" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.159538 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w"] Feb 27 09:15:00 crc kubenswrapper[4906]: E0227 09:15:00.161498 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="registry-server" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.161568 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="registry-server" Feb 27 09:15:00 crc kubenswrapper[4906]: E0227 09:15:00.161655 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="extract-content" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.161710 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="extract-content" Feb 27 09:15:00 crc kubenswrapper[4906]: E0227 09:15:00.161761 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="extract-utilities" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.161825 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="extract-utilities" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.162128 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="4dff761f-d6d6-43a9-a737-5653d0e0cbb3" containerName="registry-server" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.162843 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.165127 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.165576 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.181908 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w"] Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.307243 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f08f6bff-82c2-488a-868e-6595a3a90941-config-volume\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.307389 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f08f6bff-82c2-488a-868e-6595a3a90941-secret-volume\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.307438 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62rqm\" (UniqueName: \"kubernetes.io/projected/f08f6bff-82c2-488a-868e-6595a3a90941-kube-api-access-62rqm\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.409340 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f08f6bff-82c2-488a-868e-6595a3a90941-config-volume\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.409443 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f08f6bff-82c2-488a-868e-6595a3a90941-secret-volume\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.409473 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62rqm\" (UniqueName: \"kubernetes.io/projected/f08f6bff-82c2-488a-868e-6595a3a90941-kube-api-access-62rqm\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.410457 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f08f6bff-82c2-488a-868e-6595a3a90941-config-volume\") pod 
\"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.428710 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f08f6bff-82c2-488a-868e-6595a3a90941-secret-volume\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.430355 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62rqm\" (UniqueName: \"kubernetes.io/projected/f08f6bff-82c2-488a-868e-6595a3a90941-kube-api-access-62rqm\") pod \"collect-profiles-29536395-6dk2w\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.508236 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:00 crc kubenswrapper[4906]: I0227 09:15:00.552448 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:15:01 crc kubenswrapper[4906]: I0227 09:15:01.027305 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w"] Feb 27 09:15:01 crc kubenswrapper[4906]: I0227 09:15:01.223587 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" event={"ID":"f08f6bff-82c2-488a-868e-6595a3a90941","Type":"ContainerStarted","Data":"b721a58ce2b6b21f3c57be05f51fb68e87bec4a0f35ab301d93b97fd786df102"} Feb 27 09:15:01 crc kubenswrapper[4906]: I0227 09:15:01.227440 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"c0ecca24c3dfb89c4c60c4af6e708b2178e936671802de96e3da9b5747bcaeb0"} Feb 27 09:15:02 crc kubenswrapper[4906]: I0227 09:15:02.240434 4906 generic.go:334] "Generic (PLEG): container finished" podID="f08f6bff-82c2-488a-868e-6595a3a90941" containerID="1467b7da8ea09d63de63fd741bed5124af4d75ab6744a5b585178a775c9d8ad7" exitCode=0 Feb 27 09:15:02 crc kubenswrapper[4906]: I0227 09:15:02.240527 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" event={"ID":"f08f6bff-82c2-488a-868e-6595a3a90941","Type":"ContainerDied","Data":"1467b7da8ea09d63de63fd741bed5124af4d75ab6744a5b585178a775c9d8ad7"} Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.624529 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.784442 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f08f6bff-82c2-488a-868e-6595a3a90941-secret-volume\") pod \"f08f6bff-82c2-488a-868e-6595a3a90941\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.784559 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f08f6bff-82c2-488a-868e-6595a3a90941-config-volume\") pod \"f08f6bff-82c2-488a-868e-6595a3a90941\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.784611 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-62rqm\" (UniqueName: \"kubernetes.io/projected/f08f6bff-82c2-488a-868e-6595a3a90941-kube-api-access-62rqm\") pod \"f08f6bff-82c2-488a-868e-6595a3a90941\" (UID: \"f08f6bff-82c2-488a-868e-6595a3a90941\") " Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.785589 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f08f6bff-82c2-488a-868e-6595a3a90941-config-volume" (OuterVolumeSpecName: "config-volume") pod "f08f6bff-82c2-488a-868e-6595a3a90941" (UID: "f08f6bff-82c2-488a-868e-6595a3a90941"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.794569 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f08f6bff-82c2-488a-868e-6595a3a90941-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "f08f6bff-82c2-488a-868e-6595a3a90941" (UID: "f08f6bff-82c2-488a-868e-6595a3a90941"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.794700 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f08f6bff-82c2-488a-868e-6595a3a90941-kube-api-access-62rqm" (OuterVolumeSpecName: "kube-api-access-62rqm") pod "f08f6bff-82c2-488a-868e-6595a3a90941" (UID: "f08f6bff-82c2-488a-868e-6595a3a90941"). InnerVolumeSpecName "kube-api-access-62rqm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.886453 4906 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/f08f6bff-82c2-488a-868e-6595a3a90941-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.886486 4906 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/f08f6bff-82c2-488a-868e-6595a3a90941-config-volume\") on node \"crc\" DevicePath \"\"" Feb 27 09:15:03 crc kubenswrapper[4906]: I0227 09:15:03.886498 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-62rqm\" (UniqueName: \"kubernetes.io/projected/f08f6bff-82c2-488a-868e-6595a3a90941-kube-api-access-62rqm\") on node \"crc\" DevicePath \"\"" Feb 27 09:15:04 crc kubenswrapper[4906]: I0227 09:15:04.262547 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" event={"ID":"f08f6bff-82c2-488a-868e-6595a3a90941","Type":"ContainerDied","Data":"b721a58ce2b6b21f3c57be05f51fb68e87bec4a0f35ab301d93b97fd786df102"} Feb 27 09:15:04 crc kubenswrapper[4906]: I0227 09:15:04.262594 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b721a58ce2b6b21f3c57be05f51fb68e87bec4a0f35ab301d93b97fd786df102" Feb 27 09:15:04 crc kubenswrapper[4906]: I0227 09:15:04.262601 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29536395-6dk2w" Feb 27 09:15:04 crc kubenswrapper[4906]: I0227 09:15:04.706744 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx"] Feb 27 09:15:04 crc kubenswrapper[4906]: I0227 09:15:04.717922 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29536350-kfpvx"] Feb 27 09:15:05 crc kubenswrapper[4906]: I0227 09:15:05.110563 4906 scope.go:117] "RemoveContainer" containerID="9598f356667d018348affe838a67a1466d2306cc873f1fe7692c3ff1389f183e" Feb 27 09:15:06 crc kubenswrapper[4906]: I0227 09:15:06.566331 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac646565-cd5f-405a-ad92-3f2afb51d5c8" path="/var/lib/kubelet/pods/ac646565-cd5f-405a-ad92-3f2afb51d5c8/volumes" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.155636 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536396-z667p"] Feb 27 09:16:00 crc kubenswrapper[4906]: E0227 09:16:00.156596 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f08f6bff-82c2-488a-868e-6595a3a90941" containerName="collect-profiles" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.156609 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f08f6bff-82c2-488a-868e-6595a3a90941" containerName="collect-profiles" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.156816 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f08f6bff-82c2-488a-868e-6595a3a90941" containerName="collect-profiles" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.157687 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536396-z667p" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.159768 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.159841 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.160928 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.166940 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536396-z667p"] Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.185113 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qs8ww\" (UniqueName: \"kubernetes.io/projected/894b2016-b019-41f6-81ea-5d12d397e99f-kube-api-access-qs8ww\") pod \"auto-csr-approver-29536396-z667p\" (UID: \"894b2016-b019-41f6-81ea-5d12d397e99f\") " pod="openshift-infra/auto-csr-approver-29536396-z667p" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.286324 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qs8ww\" (UniqueName: \"kubernetes.io/projected/894b2016-b019-41f6-81ea-5d12d397e99f-kube-api-access-qs8ww\") pod \"auto-csr-approver-29536396-z667p\" (UID: \"894b2016-b019-41f6-81ea-5d12d397e99f\") " pod="openshift-infra/auto-csr-approver-29536396-z667p" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.313945 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qs8ww\" (UniqueName: \"kubernetes.io/projected/894b2016-b019-41f6-81ea-5d12d397e99f-kube-api-access-qs8ww\") pod \"auto-csr-approver-29536396-z667p\" (UID: \"894b2016-b019-41f6-81ea-5d12d397e99f\") " pod="openshift-infra/auto-csr-approver-29536396-z667p" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.484363 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536396-z667p" Feb 27 09:16:00 crc kubenswrapper[4906]: I0227 09:16:00.986271 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536396-z667p"] Feb 27 09:16:01 crc kubenswrapper[4906]: I0227 09:16:01.008087 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 09:16:01 crc kubenswrapper[4906]: I0227 09:16:01.884098 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536396-z667p" event={"ID":"894b2016-b019-41f6-81ea-5d12d397e99f","Type":"ContainerStarted","Data":"1a6fd10eaf0977bac6c84e3f89881ee77519bdacb2c57ab1c08684d57c3f1146"} Feb 27 09:16:02 crc kubenswrapper[4906]: I0227 09:16:02.895429 4906 generic.go:334] "Generic (PLEG): container finished" podID="894b2016-b019-41f6-81ea-5d12d397e99f" containerID="b357b1f85a6f242e44ecad71d8e69bdc328a3bc611f51d50d4c4e4666e4106c3" exitCode=0 Feb 27 09:16:02 crc kubenswrapper[4906]: I0227 09:16:02.895702 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536396-z667p" event={"ID":"894b2016-b019-41f6-81ea-5d12d397e99f","Type":"ContainerDied","Data":"b357b1f85a6f242e44ecad71d8e69bdc328a3bc611f51d50d4c4e4666e4106c3"} Feb 27 09:16:04 crc kubenswrapper[4906]: I0227 09:16:04.251532 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536396-z667p" Feb 27 09:16:04 crc kubenswrapper[4906]: I0227 09:16:04.382940 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs8ww\" (UniqueName: \"kubernetes.io/projected/894b2016-b019-41f6-81ea-5d12d397e99f-kube-api-access-qs8ww\") pod \"894b2016-b019-41f6-81ea-5d12d397e99f\" (UID: \"894b2016-b019-41f6-81ea-5d12d397e99f\") " Feb 27 09:16:04 crc kubenswrapper[4906]: I0227 09:16:04.392082 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/894b2016-b019-41f6-81ea-5d12d397e99f-kube-api-access-qs8ww" (OuterVolumeSpecName: "kube-api-access-qs8ww") pod "894b2016-b019-41f6-81ea-5d12d397e99f" (UID: "894b2016-b019-41f6-81ea-5d12d397e99f"). InnerVolumeSpecName "kube-api-access-qs8ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:16:04 crc kubenswrapper[4906]: I0227 09:16:04.485269 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs8ww\" (UniqueName: \"kubernetes.io/projected/894b2016-b019-41f6-81ea-5d12d397e99f-kube-api-access-qs8ww\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:04 crc kubenswrapper[4906]: I0227 09:16:04.918472 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536396-z667p" event={"ID":"894b2016-b019-41f6-81ea-5d12d397e99f","Type":"ContainerDied","Data":"1a6fd10eaf0977bac6c84e3f89881ee77519bdacb2c57ab1c08684d57c3f1146"} Feb 27 09:16:04 crc kubenswrapper[4906]: I0227 09:16:04.918790 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a6fd10eaf0977bac6c84e3f89881ee77519bdacb2c57ab1c08684d57c3f1146" Feb 27 09:16:04 crc kubenswrapper[4906]: I0227 09:16:04.918562 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536396-z667p" Feb 27 09:16:05 crc kubenswrapper[4906]: I0227 09:16:05.216161 4906 scope.go:117] "RemoveContainer" containerID="518463c7d5f4d22b76c4a2479bc7d8eb22106a8f02cce6f686e3bdb0081c951a" Feb 27 09:16:05 crc kubenswrapper[4906]: I0227 09:16:05.329818 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536390-trhcv"] Feb 27 09:16:05 crc kubenswrapper[4906]: I0227 09:16:05.339501 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536390-trhcv"] Feb 27 09:16:06 crc kubenswrapper[4906]: I0227 09:16:06.565549 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b" path="/var/lib/kubelet/pods/90ee72f1-ffa2-43b5-9ff9-95ae5273ea9b/volumes" Feb 27 09:16:23 crc kubenswrapper[4906]: I0227 09:16:23.093695 4906 generic.go:334] "Generic (PLEG): container finished" podID="a02ebde9-1894-4df1-a904-7d898d684871" containerID="146e2a629869da87eed2f84b00f28a6d1f0a4ef52d8609f2b83ff12f9caa97b6" exitCode=0 Feb 27 09:16:23 crc kubenswrapper[4906]: I0227 09:16:23.093778 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" event={"ID":"a02ebde9-1894-4df1-a904-7d898d684871","Type":"ContainerDied","Data":"146e2a629869da87eed2f84b00f28a6d1f0a4ef52d8609f2b83ff12f9caa97b6"} Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.588200 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.726507 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-telemetry-combined-ca-bundle\") pod \"a02ebde9-1894-4df1-a904-7d898d684871\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.726652 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-0\") pod \"a02ebde9-1894-4df1-a904-7d898d684871\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.726698 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-1\") pod \"a02ebde9-1894-4df1-a904-7d898d684871\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.726777 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ssh-key-openstack-edpm-ipam\") pod \"a02ebde9-1894-4df1-a904-7d898d684871\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.726862 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-2\") pod \"a02ebde9-1894-4df1-a904-7d898d684871\" (UID: 
\"a02ebde9-1894-4df1-a904-7d898d684871\") " Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.726982 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hzlxp\" (UniqueName: \"kubernetes.io/projected/a02ebde9-1894-4df1-a904-7d898d684871-kube-api-access-hzlxp\") pod \"a02ebde9-1894-4df1-a904-7d898d684871\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.727023 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-inventory\") pod \"a02ebde9-1894-4df1-a904-7d898d684871\" (UID: \"a02ebde9-1894-4df1-a904-7d898d684871\") " Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.732511 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a02ebde9-1894-4df1-a904-7d898d684871-kube-api-access-hzlxp" (OuterVolumeSpecName: "kube-api-access-hzlxp") pod "a02ebde9-1894-4df1-a904-7d898d684871" (UID: "a02ebde9-1894-4df1-a904-7d898d684871"). InnerVolumeSpecName "kube-api-access-hzlxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.733002 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "a02ebde9-1894-4df1-a904-7d898d684871" (UID: "a02ebde9-1894-4df1-a904-7d898d684871"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.754115 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "a02ebde9-1894-4df1-a904-7d898d684871" (UID: "a02ebde9-1894-4df1-a904-7d898d684871"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.756043 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "a02ebde9-1894-4df1-a904-7d898d684871" (UID: "a02ebde9-1894-4df1-a904-7d898d684871"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.756098 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "a02ebde9-1894-4df1-a904-7d898d684871" (UID: "a02ebde9-1894-4df1-a904-7d898d684871"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.767611 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-inventory" (OuterVolumeSpecName: "inventory") pod "a02ebde9-1894-4df1-a904-7d898d684871" (UID: "a02ebde9-1894-4df1-a904-7d898d684871"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.768920 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "a02ebde9-1894-4df1-a904-7d898d684871" (UID: "a02ebde9-1894-4df1-a904-7d898d684871"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.830996 4906 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.831032 4906 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.831045 4906 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.831055 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.831063 4906 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.831073 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hzlxp\" (UniqueName: \"kubernetes.io/projected/a02ebde9-1894-4df1-a904-7d898d684871-kube-api-access-hzlxp\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:24 crc kubenswrapper[4906]: I0227 09:16:24.831083 4906 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a02ebde9-1894-4df1-a904-7d898d684871-inventory\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:25 crc kubenswrapper[4906]: I0227 09:16:25.125612 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" event={"ID":"a02ebde9-1894-4df1-a904-7d898d684871","Type":"ContainerDied","Data":"bb03b6d185b8936385904214c6746e7589583657090bbb7e0f84b34af00e6b35"} Feb 27 09:16:25 crc kubenswrapper[4906]: I0227 09:16:25.125689 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb03b6d185b8936385904214c6746e7589583657090bbb7e0f84b34af00e6b35" Feb 27 09:16:25 crc kubenswrapper[4906]: I0227 09:16:25.125858 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-jkswf" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.391790 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hsf9x"] Feb 27 09:16:36 crc kubenswrapper[4906]: E0227 09:16:36.392900 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="894b2016-b019-41f6-81ea-5d12d397e99f" containerName="oc" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.392921 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="894b2016-b019-41f6-81ea-5d12d397e99f" containerName="oc" Feb 27 09:16:36 crc kubenswrapper[4906]: E0227 09:16:36.392939 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a02ebde9-1894-4df1-a904-7d898d684871" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.392949 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="a02ebde9-1894-4df1-a904-7d898d684871" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.393202 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="a02ebde9-1894-4df1-a904-7d898d684871" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.393231 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="894b2016-b019-41f6-81ea-5d12d397e99f" containerName="oc" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.395158 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.405937 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hsf9x"] Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.582961 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-utilities\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.583020 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwzbk\" (UniqueName: \"kubernetes.io/projected/50dcc049-c987-43a6-be3b-36d67b99c1fe-kube-api-access-rwzbk\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.583076 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-catalog-content\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.684724 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-utilities\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: 
I0227 09:16:36.685292 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-utilities\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.685433 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwzbk\" (UniqueName: \"kubernetes.io/projected/50dcc049-c987-43a6-be3b-36d67b99c1fe-kube-api-access-rwzbk\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.685560 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-catalog-content\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.686107 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-catalog-content\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.709854 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwzbk\" (UniqueName: \"kubernetes.io/projected/50dcc049-c987-43a6-be3b-36d67b99c1fe-kube-api-access-rwzbk\") pod \"community-operators-hsf9x\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:36 crc kubenswrapper[4906]: I0227 09:16:36.740457 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:37 crc kubenswrapper[4906]: I0227 09:16:37.311939 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hsf9x"] Feb 27 09:16:38 crc kubenswrapper[4906]: I0227 09:16:38.287809 4906 generic.go:334] "Generic (PLEG): container finished" podID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerID="bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9" exitCode=0 Feb 27 09:16:38 crc kubenswrapper[4906]: I0227 09:16:38.287934 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hsf9x" event={"ID":"50dcc049-c987-43a6-be3b-36d67b99c1fe","Type":"ContainerDied","Data":"bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9"} Feb 27 09:16:38 crc kubenswrapper[4906]: I0227 09:16:38.288616 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hsf9x" event={"ID":"50dcc049-c987-43a6-be3b-36d67b99c1fe","Type":"ContainerStarted","Data":"f54c36ae4feaa32ad57b0a25924a487c5807311c323eb6846d018752f80d578b"} Feb 27 09:16:39 crc kubenswrapper[4906]: I0227 09:16:39.298057 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hsf9x" event={"ID":"50dcc049-c987-43a6-be3b-36d67b99c1fe","Type":"ContainerStarted","Data":"7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b"} Feb 27 09:16:40 crc kubenswrapper[4906]: I0227 09:16:40.313210 4906 generic.go:334] "Generic (PLEG): container finished" podID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerID="7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b" exitCode=0 Feb 27 09:16:40 crc kubenswrapper[4906]: I0227 09:16:40.313268 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hsf9x" event={"ID":"50dcc049-c987-43a6-be3b-36d67b99c1fe","Type":"ContainerDied","Data":"7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b"} Feb 27 09:16:41 crc kubenswrapper[4906]: I0227 09:16:41.328751 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hsf9x" event={"ID":"50dcc049-c987-43a6-be3b-36d67b99c1fe","Type":"ContainerStarted","Data":"c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a"} Feb 27 09:16:41 crc kubenswrapper[4906]: I0227 09:16:41.360551 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hsf9x" podStartSLOduration=2.916208279 podStartE2EDuration="5.360524119s" podCreationTimestamp="2026-02-27 09:16:36 +0000 UTC" firstStartedPulling="2026-02-27 09:16:38.291712315 +0000 UTC m=+2896.686113965" lastFinishedPulling="2026-02-27 09:16:40.736028155 +0000 UTC m=+2899.130429805" observedRunningTime="2026-02-27 09:16:41.35556647 +0000 UTC m=+2899.749968120" watchObservedRunningTime="2026-02-27 09:16:41.360524119 +0000 UTC m=+2899.754925739" Feb 27 09:16:46 crc kubenswrapper[4906]: I0227 09:16:46.741350 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:46 crc kubenswrapper[4906]: I0227 09:16:46.741965 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:46 crc kubenswrapper[4906]: I0227 09:16:46.807236 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:47 crc kubenswrapper[4906]: I0227 09:16:47.447113 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:47 crc kubenswrapper[4906]: I0227 09:16:47.505149 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hsf9x"] Feb 27 09:16:49 crc kubenswrapper[4906]: I0227 09:16:49.396704 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-hsf9x" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="registry-server" containerID="cri-o://c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a" gracePeriod=2 Feb 27 09:16:49 crc kubenswrapper[4906]: I0227 09:16:49.948328 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:49 crc kubenswrapper[4906]: I0227 09:16:49.991758 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-utilities\") pod \"50dcc049-c987-43a6-be3b-36d67b99c1fe\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " Feb 27 09:16:49 crc kubenswrapper[4906]: I0227 09:16:49.991842 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-catalog-content\") pod \"50dcc049-c987-43a6-be3b-36d67b99c1fe\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " Feb 27 09:16:49 crc kubenswrapper[4906]: I0227 09:16:49.992186 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwzbk\" (UniqueName: \"kubernetes.io/projected/50dcc049-c987-43a6-be3b-36d67b99c1fe-kube-api-access-rwzbk\") pod \"50dcc049-c987-43a6-be3b-36d67b99c1fe\" (UID: \"50dcc049-c987-43a6-be3b-36d67b99c1fe\") " Feb 27 09:16:49 crc kubenswrapper[4906]: I0227 09:16:49.992776 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-utilities" (OuterVolumeSpecName: "utilities") pod "50dcc049-c987-43a6-be3b-36d67b99c1fe" (UID: "50dcc049-c987-43a6-be3b-36d67b99c1fe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.015819 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50dcc049-c987-43a6-be3b-36d67b99c1fe-kube-api-access-rwzbk" (OuterVolumeSpecName: "kube-api-access-rwzbk") pod "50dcc049-c987-43a6-be3b-36d67b99c1fe" (UID: "50dcc049-c987-43a6-be3b-36d67b99c1fe"). InnerVolumeSpecName "kube-api-access-rwzbk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.056178 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "50dcc049-c987-43a6-be3b-36d67b99c1fe" (UID: "50dcc049-c987-43a6-be3b-36d67b99c1fe"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.095458 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwzbk\" (UniqueName: \"kubernetes.io/projected/50dcc049-c987-43a6-be3b-36d67b99c1fe-kube-api-access-rwzbk\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.095509 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.095522 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/50dcc049-c987-43a6-be3b-36d67b99c1fe-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.414809 4906 generic.go:334] "Generic (PLEG): container finished" podID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerID="c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a" exitCode=0 Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.414871 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hsf9x" event={"ID":"50dcc049-c987-43a6-be3b-36d67b99c1fe","Type":"ContainerDied","Data":"c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a"} Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.414953 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hsf9x" event={"ID":"50dcc049-c987-43a6-be3b-36d67b99c1fe","Type":"ContainerDied","Data":"f54c36ae4feaa32ad57b0a25924a487c5807311c323eb6846d018752f80d578b"} Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.414979 4906 scope.go:117] "RemoveContainer" containerID="c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.415039 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hsf9x" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.443295 4906 scope.go:117] "RemoveContainer" containerID="7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.471031 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hsf9x"] Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.481268 4906 scope.go:117] "RemoveContainer" containerID="bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.481910 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hsf9x"] Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.506121 4906 scope.go:117] "RemoveContainer" containerID="c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a" Feb 27 09:16:50 crc kubenswrapper[4906]: E0227 09:16:50.506588 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a\": container with ID starting with c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a not found: ID does not exist" containerID="c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.506712 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a"} err="failed to get container status \"c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a\": rpc error: code = NotFound desc = could not find container \"c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a\": container with ID starting with c811fc437f612291d160137a64b7f7c0b0ee05c062d7d963cf42e3434c916d5a not found: ID does not exist" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.506820 4906 scope.go:117] "RemoveContainer" containerID="7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b" Feb 27 09:16:50 crc kubenswrapper[4906]: E0227 09:16:50.507500 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b\": container with ID starting with 7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b not found: ID does not exist" containerID="7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.507542 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b"} err="failed to get container status \"7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b\": rpc error: code = NotFound desc = could not find container \"7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b\": container with ID starting with 7d3f06c6aabf28ca5b7387fbbcf8b9f8e84a56bccfb33ce654fe1882525b3f4b not found: ID does not exist" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.507564 4906 scope.go:117] "RemoveContainer" containerID="bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9" Feb 27 09:16:50 crc kubenswrapper[4906]: E0227 09:16:50.507898 4906 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9\": container with ID starting with bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9 not found: ID does not exist" containerID="bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.507961 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9"} err="failed to get container status \"bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9\": rpc error: code = NotFound desc = could not find container \"bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9\": container with ID starting with bd33ba5ae23f6f9972ef58781bb13597135bc9c783ca04a9777b742e3e043bd9 not found: ID does not exist" Feb 27 09:16:50 crc kubenswrapper[4906]: I0227 09:16:50.564376 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" path="/var/lib/kubelet/pods/50dcc049-c987-43a6-be3b-36d67b99c1fe/volumes" Feb 27 09:17:05 crc kubenswrapper[4906]: I0227 09:17:05.287544 4906 scope.go:117] "RemoveContainer" containerID="f17bd155d7ac0fb9528689230c402a875310975ed8a8fe5024044805ed3a277d" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.445923 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Feb 27 09:17:21 crc kubenswrapper[4906]: E0227 09:17:21.447219 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="extract-content" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.447239 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="extract-content" Feb 27 09:17:21 crc kubenswrapper[4906]: E0227 09:17:21.447309 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="extract-utilities" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.447318 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="extract-utilities" Feb 27 09:17:21 crc kubenswrapper[4906]: E0227 09:17:21.447332 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="registry-server" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.447340 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="registry-server" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.447591 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="50dcc049-c987-43a6-be3b-36d67b99c1fe" containerName="registry-server" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.448520 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.451360 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.451409 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.451907 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.456571 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gr5lb" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.467528 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524433 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524488 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524513 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsj7c\" (UniqueName: \"kubernetes.io/projected/006c37d3-09e7-4ee5-aedf-8e3ea8049043-kube-api-access-dsj7c\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524602 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524649 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-config-data\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524764 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524806 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: 
\"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524856 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.524993 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.626991 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627061 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627101 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsj7c\" (UniqueName: \"kubernetes.io/projected/006c37d3-09e7-4ee5-aedf-8e3ea8049043-kube-api-access-dsj7c\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627171 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627217 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-config-data\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627362 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627406 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config\") pod 
\"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627466 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627579 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.627923 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.628317 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.628736 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.629755 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.630141 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-config-data\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.639975 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.640104 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc 
kubenswrapper[4906]: I0227 09:17:21.640127 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.648291 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsj7c\" (UniqueName: \"kubernetes.io/projected/006c37d3-09e7-4ee5-aedf-8e3ea8049043-kube-api-access-dsj7c\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.665537 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"tempest-tests-tempest\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " pod="openstack/tempest-tests-tempest" Feb 27 09:17:21 crc kubenswrapper[4906]: I0227 09:17:21.778563 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 27 09:17:22 crc kubenswrapper[4906]: I0227 09:17:22.274798 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Feb 27 09:17:22 crc kubenswrapper[4906]: I0227 09:17:22.743116 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"006c37d3-09e7-4ee5-aedf-8e3ea8049043","Type":"ContainerStarted","Data":"281f5b1b7080d587845197d008e68793a24a21fa0afdadaa1aeaeeb408cf861a"} Feb 27 09:17:24 crc kubenswrapper[4906]: I0227 09:17:24.844752 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:17:24 crc kubenswrapper[4906]: I0227 09:17:24.845348 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:17:52 crc kubenswrapper[4906]: E0227 09:17:52.931384 4906 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Feb 27 09:17:52 crc kubenswrapper[4906]: E0227 09:17:52.932403 4906 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dsj7c,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(006c37d3-09e7-4ee5-aedf-8e3ea8049043): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 27 09:17:52 crc kubenswrapper[4906]: E0227 09:17:52.934855 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="006c37d3-09e7-4ee5-aedf-8e3ea8049043" Feb 27 09:17:53 crc kubenswrapper[4906]: E0227 09:17:53.063553 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="006c37d3-09e7-4ee5-aedf-8e3ea8049043" Feb 27 09:17:54 crc kubenswrapper[4906]: I0227 09:17:54.844725 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:17:54 crc kubenswrapper[4906]: I0227 09:17:54.845013 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.154037 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536398-bhq4t"] Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.158000 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.160794 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.161175 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.164780 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.167949 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536398-bhq4t"] Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.168616 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fktv2\" (UniqueName: \"kubernetes.io/projected/ec8a8ad7-ef55-4952-82ca-1e9b76e3016b-kube-api-access-fktv2\") pod \"auto-csr-approver-29536398-bhq4t\" (UID: \"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b\") " pod="openshift-infra/auto-csr-approver-29536398-bhq4t" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.270250 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fktv2\" (UniqueName: \"kubernetes.io/projected/ec8a8ad7-ef55-4952-82ca-1e9b76e3016b-kube-api-access-fktv2\") pod \"auto-csr-approver-29536398-bhq4t\" (UID: \"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b\") " pod="openshift-infra/auto-csr-approver-29536398-bhq4t" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.297846 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fktv2\" (UniqueName: \"kubernetes.io/projected/ec8a8ad7-ef55-4952-82ca-1e9b76e3016b-kube-api-access-fktv2\") pod \"auto-csr-approver-29536398-bhq4t\" (UID: \"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b\") " pod="openshift-infra/auto-csr-approver-29536398-bhq4t" Feb 27 09:18:00 
crc kubenswrapper[4906]: I0227 09:18:00.482279 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" Feb 27 09:18:00 crc kubenswrapper[4906]: I0227 09:18:00.951278 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536398-bhq4t"] Feb 27 09:18:01 crc kubenswrapper[4906]: I0227 09:18:01.138423 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" event={"ID":"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b","Type":"ContainerStarted","Data":"4caf45b035469b08953d9c786d3531a50e752a7d70a54820291a5514775a34e3"} Feb 27 09:18:02 crc kubenswrapper[4906]: I0227 09:18:02.151730 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" event={"ID":"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b","Type":"ContainerStarted","Data":"b57514a561900453566b9ebb8b5cd14eb3df055bb8f541e4fb34d0768ebc3ab7"} Feb 27 09:18:02 crc kubenswrapper[4906]: I0227 09:18:02.173088 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" podStartSLOduration=1.39434262 podStartE2EDuration="2.173060384s" podCreationTimestamp="2026-02-27 09:18:00 +0000 UTC" firstStartedPulling="2026-02-27 09:18:00.960538196 +0000 UTC m=+2979.354939826" lastFinishedPulling="2026-02-27 09:18:01.73925597 +0000 UTC m=+2980.133657590" observedRunningTime="2026-02-27 09:18:02.169356337 +0000 UTC m=+2980.563757947" watchObservedRunningTime="2026-02-27 09:18:02.173060384 +0000 UTC m=+2980.567461994" Feb 27 09:18:03 crc kubenswrapper[4906]: I0227 09:18:03.163700 4906 generic.go:334] "Generic (PLEG): container finished" podID="ec8a8ad7-ef55-4952-82ca-1e9b76e3016b" containerID="b57514a561900453566b9ebb8b5cd14eb3df055bb8f541e4fb34d0768ebc3ab7" exitCode=0 Feb 27 09:18:03 crc kubenswrapper[4906]: I0227 09:18:03.163783 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" event={"ID":"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b","Type":"ContainerDied","Data":"b57514a561900453566b9ebb8b5cd14eb3df055bb8f541e4fb34d0768ebc3ab7"} Feb 27 09:18:04 crc kubenswrapper[4906]: I0227 09:18:04.551099 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" Feb 27 09:18:04 crc kubenswrapper[4906]: I0227 09:18:04.664660 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fktv2\" (UniqueName: \"kubernetes.io/projected/ec8a8ad7-ef55-4952-82ca-1e9b76e3016b-kube-api-access-fktv2\") pod \"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b\" (UID: \"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b\") " Feb 27 09:18:04 crc kubenswrapper[4906]: I0227 09:18:04.676613 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ec8a8ad7-ef55-4952-82ca-1e9b76e3016b-kube-api-access-fktv2" (OuterVolumeSpecName: "kube-api-access-fktv2") pod "ec8a8ad7-ef55-4952-82ca-1e9b76e3016b" (UID: "ec8a8ad7-ef55-4952-82ca-1e9b76e3016b"). InnerVolumeSpecName "kube-api-access-fktv2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:18:04 crc kubenswrapper[4906]: I0227 09:18:04.767681 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fktv2\" (UniqueName: \"kubernetes.io/projected/ec8a8ad7-ef55-4952-82ca-1e9b76e3016b-kube-api-access-fktv2\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:05 crc kubenswrapper[4906]: I0227 09:18:05.190371 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" event={"ID":"ec8a8ad7-ef55-4952-82ca-1e9b76e3016b","Type":"ContainerDied","Data":"4caf45b035469b08953d9c786d3531a50e752a7d70a54820291a5514775a34e3"} Feb 27 09:18:05 crc kubenswrapper[4906]: I0227 09:18:05.190426 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4caf45b035469b08953d9c786d3531a50e752a7d70a54820291a5514775a34e3" Feb 27 09:18:05 crc kubenswrapper[4906]: I0227 09:18:05.190646 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536398-bhq4t" Feb 27 09:18:05 crc kubenswrapper[4906]: I0227 09:18:05.266834 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536392-9glgm"] Feb 27 09:18:05 crc kubenswrapper[4906]: I0227 09:18:05.278772 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536392-9glgm"] Feb 27 09:18:06 crc kubenswrapper[4906]: I0227 09:18:06.563206 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="098f1145-2bd1-40b6-9349-38409d6b89a9" path="/var/lib/kubelet/pods/098f1145-2bd1-40b6-9349-38409d6b89a9/volumes" Feb 27 09:18:06 crc kubenswrapper[4906]: I0227 09:18:06.780145 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Feb 27 09:18:08 crc kubenswrapper[4906]: I0227 09:18:08.217225 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"006c37d3-09e7-4ee5-aedf-8e3ea8049043","Type":"ContainerStarted","Data":"8fe84b6aebdd96f7fd246b3abf943306796085c3abb99b2bc216704de3e26788"} Feb 27 09:18:08 crc kubenswrapper[4906]: I0227 09:18:08.235295 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.740082995 podStartE2EDuration="48.23527515s" podCreationTimestamp="2026-02-27 09:17:20 +0000 UTC" firstStartedPulling="2026-02-27 09:17:22.280808171 +0000 UTC m=+2940.675209771" lastFinishedPulling="2026-02-27 09:18:06.776000316 +0000 UTC m=+2985.170401926" observedRunningTime="2026-02-27 09:18:08.232013264 +0000 UTC m=+2986.626414874" watchObservedRunningTime="2026-02-27 09:18:08.23527515 +0000 UTC m=+2986.629676760" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.173657 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wvpgx"] Feb 27 09:18:20 crc kubenswrapper[4906]: E0227 09:18:20.174604 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ec8a8ad7-ef55-4952-82ca-1e9b76e3016b" containerName="oc" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.174619 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="ec8a8ad7-ef55-4952-82ca-1e9b76e3016b" containerName="oc" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.174815 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="ec8a8ad7-ef55-4952-82ca-1e9b76e3016b" containerName="oc" Feb 27 09:18:20 crc kubenswrapper[4906]: 
I0227 09:18:20.176453 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.192962 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvpgx"] Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.336038 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-utilities\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.336412 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-catalog-content\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.336843 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fxtv\" (UniqueName: \"kubernetes.io/projected/8723d5f6-40fd-4651-b8d4-825293a56289-kube-api-access-5fxtv\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.438546 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-catalog-content\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.439027 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-catalog-content\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.439197 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fxtv\" (UniqueName: \"kubernetes.io/projected/8723d5f6-40fd-4651-b8d4-825293a56289-kube-api-access-5fxtv\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.439351 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-utilities\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.439821 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-utilities\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: 
I0227 09:18:20.462282 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fxtv\" (UniqueName: \"kubernetes.io/projected/8723d5f6-40fd-4651-b8d4-825293a56289-kube-api-access-5fxtv\") pod \"redhat-marketplace-wvpgx\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.495176 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:20 crc kubenswrapper[4906]: I0227 09:18:20.988561 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvpgx"] Feb 27 09:18:20 crc kubenswrapper[4906]: W0227 09:18:20.993415 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8723d5f6_40fd_4651_b8d4_825293a56289.slice/crio-3a381cee4a7c292acb3edc616941197314733546595ecd721c44d9090d5682ee WatchSource:0}: Error finding container 3a381cee4a7c292acb3edc616941197314733546595ecd721c44d9090d5682ee: Status 404 returned error can't find the container with id 3a381cee4a7c292acb3edc616941197314733546595ecd721c44d9090d5682ee Feb 27 09:18:21 crc kubenswrapper[4906]: I0227 09:18:21.340993 4906 generic.go:334] "Generic (PLEG): container finished" podID="006c37d3-09e7-4ee5-aedf-8e3ea8049043" containerID="8fe84b6aebdd96f7fd246b3abf943306796085c3abb99b2bc216704de3e26788" exitCode=123 Feb 27 09:18:21 crc kubenswrapper[4906]: I0227 09:18:21.341081 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"006c37d3-09e7-4ee5-aedf-8e3ea8049043","Type":"ContainerDied","Data":"8fe84b6aebdd96f7fd246b3abf943306796085c3abb99b2bc216704de3e26788"} Feb 27 09:18:21 crc kubenswrapper[4906]: I0227 09:18:21.343263 4906 generic.go:334] "Generic (PLEG): container finished" podID="8723d5f6-40fd-4651-b8d4-825293a56289" containerID="c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c" exitCode=0 Feb 27 09:18:21 crc kubenswrapper[4906]: I0227 09:18:21.343300 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvpgx" event={"ID":"8723d5f6-40fd-4651-b8d4-825293a56289","Type":"ContainerDied","Data":"c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c"} Feb 27 09:18:21 crc kubenswrapper[4906]: I0227 09:18:21.343323 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvpgx" event={"ID":"8723d5f6-40fd-4651-b8d4-825293a56289","Type":"ContainerStarted","Data":"3a381cee4a7c292acb3edc616941197314733546595ecd721c44d9090d5682ee"} Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.354423 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvpgx" event={"ID":"8723d5f6-40fd-4651-b8d4-825293a56289","Type":"ContainerStarted","Data":"cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5"} Feb 27 09:18:22 crc kubenswrapper[4906]: E0227 09:18:22.515305 4906 info.go:109] Failed to get network devices: open /sys/class/net/281f5b1b7080d58/address: no such file or directory Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.809195 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.892786 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsj7c\" (UniqueName: \"kubernetes.io/projected/006c37d3-09e7-4ee5-aedf-8e3ea8049043-kube-api-access-dsj7c\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.892871 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-temporary\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.892973 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.893001 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.893039 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ca-certs\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.893055 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-config-data\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.893104 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config-secret\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.893161 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ssh-key\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.893182 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-workdir\") pod \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\" (UID: \"006c37d3-09e7-4ee5-aedf-8e3ea8049043\") " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.893901 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-config-data" (OuterVolumeSpecName: "config-data") pod 
"006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.894123 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.894562 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.899509 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/006c37d3-09e7-4ee5-aedf-8e3ea8049043-kube-api-access-dsj7c" (OuterVolumeSpecName: "kube-api-access-dsj7c") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "kube-api-access-dsj7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.900062 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "test-operator-logs") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.924790 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.926966 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.928610 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.947984 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "006c37d3-09e7-4ee5-aedf-8e3ea8049043" (UID: "006c37d3-09e7-4ee5-aedf-8e3ea8049043"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995541 4906 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995795 4906 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ca-certs\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995928 4906 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-config-data\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995951 4906 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995966 4906 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/006c37d3-09e7-4ee5-aedf-8e3ea8049043-ssh-key\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995976 4906 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995986 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsj7c\" (UniqueName: \"kubernetes.io/projected/006c37d3-09e7-4ee5-aedf-8e3ea8049043-kube-api-access-dsj7c\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.995996 4906 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/006c37d3-09e7-4ee5-aedf-8e3ea8049043-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:22 crc kubenswrapper[4906]: I0227 09:18:22.996005 4906 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/006c37d3-09e7-4ee5-aedf-8e3ea8049043-openstack-config\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:23 crc kubenswrapper[4906]: I0227 09:18:23.027626 4906 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Feb 27 09:18:23 crc kubenswrapper[4906]: I0227 09:18:23.098191 4906 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:23 crc kubenswrapper[4906]: I0227 09:18:23.370625 4906 generic.go:334] "Generic (PLEG): container 
finished" podID="8723d5f6-40fd-4651-b8d4-825293a56289" containerID="cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5" exitCode=0 Feb 27 09:18:23 crc kubenswrapper[4906]: I0227 09:18:23.370691 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvpgx" event={"ID":"8723d5f6-40fd-4651-b8d4-825293a56289","Type":"ContainerDied","Data":"cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5"} Feb 27 09:18:23 crc kubenswrapper[4906]: I0227 09:18:23.377818 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"006c37d3-09e7-4ee5-aedf-8e3ea8049043","Type":"ContainerDied","Data":"281f5b1b7080d587845197d008e68793a24a21fa0afdadaa1aeaeeb408cf861a"} Feb 27 09:18:23 crc kubenswrapper[4906]: I0227 09:18:23.377906 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Feb 27 09:18:23 crc kubenswrapper[4906]: I0227 09:18:23.377924 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="281f5b1b7080d587845197d008e68793a24a21fa0afdadaa1aeaeeb408cf861a" Feb 27 09:18:24 crc kubenswrapper[4906]: I0227 09:18:24.389082 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvpgx" event={"ID":"8723d5f6-40fd-4651-b8d4-825293a56289","Type":"ContainerStarted","Data":"3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c"} Feb 27 09:18:24 crc kubenswrapper[4906]: I0227 09:18:24.407706 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wvpgx" podStartSLOduration=1.812602643 podStartE2EDuration="4.407683715s" podCreationTimestamp="2026-02-27 09:18:20 +0000 UTC" firstStartedPulling="2026-02-27 09:18:21.345098084 +0000 UTC m=+2999.739499694" lastFinishedPulling="2026-02-27 09:18:23.940179106 +0000 UTC m=+3002.334580766" observedRunningTime="2026-02-27 09:18:24.405756623 +0000 UTC m=+3002.800158233" watchObservedRunningTime="2026-02-27 09:18:24.407683715 +0000 UTC m=+3002.802085325" Feb 27 09:18:24 crc kubenswrapper[4906]: I0227 09:18:24.844379 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:18:24 crc kubenswrapper[4906]: I0227 09:18:24.844441 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:18:24 crc kubenswrapper[4906]: I0227 09:18:24.844488 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 09:18:24 crc kubenswrapper[4906]: I0227 09:18:24.845014 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c0ecca24c3dfb89c4c60c4af6e708b2178e936671802de96e3da9b5747bcaeb0"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 09:18:24 crc 
kubenswrapper[4906]: I0227 09:18:24.845073 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://c0ecca24c3dfb89c4c60c4af6e708b2178e936671802de96e3da9b5747bcaeb0" gracePeriod=600 Feb 27 09:18:25 crc kubenswrapper[4906]: I0227 09:18:25.400630 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="c0ecca24c3dfb89c4c60c4af6e708b2178e936671802de96e3da9b5747bcaeb0" exitCode=0 Feb 27 09:18:25 crc kubenswrapper[4906]: I0227 09:18:25.404772 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"c0ecca24c3dfb89c4c60c4af6e708b2178e936671802de96e3da9b5747bcaeb0"} Feb 27 09:18:25 crc kubenswrapper[4906]: I0227 09:18:25.404852 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1"} Feb 27 09:18:25 crc kubenswrapper[4906]: I0227 09:18:25.404910 4906 scope.go:117] "RemoveContainer" containerID="f808e95c21f8599c90ba41ac88300412b4f2b03558b3398a00f42fd8d827aa26" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.393081 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 27 09:18:27 crc kubenswrapper[4906]: E0227 09:18:27.395950 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="006c37d3-09e7-4ee5-aedf-8e3ea8049043" containerName="tempest-tests-tempest-tests-runner" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.395986 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="006c37d3-09e7-4ee5-aedf-8e3ea8049043" containerName="tempest-tests-tempest-tests-runner" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.396398 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="006c37d3-09e7-4ee5-aedf-8e3ea8049043" containerName="tempest-tests-tempest-tests-runner" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.397669 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.401257 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.407216 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-gr5lb" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.488341 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvbmx\" (UniqueName: \"kubernetes.io/projected/81933748-225d-4873-a7ba-4c2a3e91d54a-kube-api-access-jvbmx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"81933748-225d-4873-a7ba-4c2a3e91d54a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.488565 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"81933748-225d-4873-a7ba-4c2a3e91d54a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.590509 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"81933748-225d-4873-a7ba-4c2a3e91d54a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.590791 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvbmx\" (UniqueName: \"kubernetes.io/projected/81933748-225d-4873-a7ba-4c2a3e91d54a-kube-api-access-jvbmx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"81933748-225d-4873-a7ba-4c2a3e91d54a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.591070 4906 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"81933748-225d-4873-a7ba-4c2a3e91d54a\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.622002 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvbmx\" (UniqueName: \"kubernetes.io/projected/81933748-225d-4873-a7ba-4c2a3e91d54a-kube-api-access-jvbmx\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"81933748-225d-4873-a7ba-4c2a3e91d54a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc kubenswrapper[4906]: I0227 09:18:27.622580 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"81933748-225d-4873-a7ba-4c2a3e91d54a\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:27 crc 
kubenswrapper[4906]: I0227 09:18:27.718745 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Feb 27 09:18:28 crc kubenswrapper[4906]: I0227 09:18:28.174606 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Feb 27 09:18:28 crc kubenswrapper[4906]: I0227 09:18:28.460651 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"81933748-225d-4873-a7ba-4c2a3e91d54a","Type":"ContainerStarted","Data":"d3e3db6ad1a5a69bfc1565b6389839e19e8f09fe1a2f1941d00bf30fda59befe"} Feb 27 09:18:29 crc kubenswrapper[4906]: I0227 09:18:29.471947 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"81933748-225d-4873-a7ba-4c2a3e91d54a","Type":"ContainerStarted","Data":"c7c9270f2d556bfccde48e6b0352876fb794db0ce7d723e428cb9747ac645109"} Feb 27 09:18:30 crc kubenswrapper[4906]: I0227 09:18:30.496428 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:30 crc kubenswrapper[4906]: I0227 09:18:30.497215 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:30 crc kubenswrapper[4906]: I0227 09:18:30.617198 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:30 crc kubenswrapper[4906]: I0227 09:18:30.654739 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.8315601299999997 podStartE2EDuration="3.654052031s" podCreationTimestamp="2026-02-27 09:18:27 +0000 UTC" firstStartedPulling="2026-02-27 09:18:28.185793733 +0000 UTC m=+3006.580195343" lastFinishedPulling="2026-02-27 09:18:29.008285634 +0000 UTC m=+3007.402687244" observedRunningTime="2026-02-27 09:18:29.48771412 +0000 UTC m=+3007.882115770" watchObservedRunningTime="2026-02-27 09:18:30.654052031 +0000 UTC m=+3009.048453681" Feb 27 09:18:31 crc kubenswrapper[4906]: I0227 09:18:31.545329 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:31 crc kubenswrapper[4906]: I0227 09:18:31.621502 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvpgx"] Feb 27 09:18:33 crc kubenswrapper[4906]: I0227 09:18:33.517584 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wvpgx" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="registry-server" containerID="cri-o://3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c" gracePeriod=2 Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.007027 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.143872 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5fxtv\" (UniqueName: \"kubernetes.io/projected/8723d5f6-40fd-4651-b8d4-825293a56289-kube-api-access-5fxtv\") pod \"8723d5f6-40fd-4651-b8d4-825293a56289\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.144125 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-utilities\") pod \"8723d5f6-40fd-4651-b8d4-825293a56289\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.144195 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-catalog-content\") pod \"8723d5f6-40fd-4651-b8d4-825293a56289\" (UID: \"8723d5f6-40fd-4651-b8d4-825293a56289\") " Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.145349 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-utilities" (OuterVolumeSpecName: "utilities") pod "8723d5f6-40fd-4651-b8d4-825293a56289" (UID: "8723d5f6-40fd-4651-b8d4-825293a56289"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.155587 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8723d5f6-40fd-4651-b8d4-825293a56289-kube-api-access-5fxtv" (OuterVolumeSpecName: "kube-api-access-5fxtv") pod "8723d5f6-40fd-4651-b8d4-825293a56289" (UID: "8723d5f6-40fd-4651-b8d4-825293a56289"). InnerVolumeSpecName "kube-api-access-5fxtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.182801 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8723d5f6-40fd-4651-b8d4-825293a56289" (UID: "8723d5f6-40fd-4651-b8d4-825293a56289"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.247390 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5fxtv\" (UniqueName: \"kubernetes.io/projected/8723d5f6-40fd-4651-b8d4-825293a56289-kube-api-access-5fxtv\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.247437 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.247448 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8723d5f6-40fd-4651-b8d4-825293a56289-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.542260 4906 generic.go:334] "Generic (PLEG): container finished" podID="8723d5f6-40fd-4651-b8d4-825293a56289" containerID="3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c" exitCode=0 Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.542325 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvpgx" event={"ID":"8723d5f6-40fd-4651-b8d4-825293a56289","Type":"ContainerDied","Data":"3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c"} Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.542368 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wvpgx" event={"ID":"8723d5f6-40fd-4651-b8d4-825293a56289","Type":"ContainerDied","Data":"3a381cee4a7c292acb3edc616941197314733546595ecd721c44d9090d5682ee"} Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.542403 4906 scope.go:117] "RemoveContainer" containerID="3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.542642 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wvpgx" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.591622 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvpgx"] Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.592012 4906 scope.go:117] "RemoveContainer" containerID="cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.602291 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wvpgx"] Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.618874 4906 scope.go:117] "RemoveContainer" containerID="c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.679725 4906 scope.go:117] "RemoveContainer" containerID="3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c" Feb 27 09:18:34 crc kubenswrapper[4906]: E0227 09:18:34.680534 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c\": container with ID starting with 3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c not found: ID does not exist" containerID="3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.681066 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c"} err="failed to get container status \"3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c\": rpc error: code = NotFound desc = could not find container \"3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c\": container with ID starting with 3bf456a5e3b48b22680db89c0a553b5dcdab28a1be15b1e9a95c597ead7e2b9c not found: ID does not exist" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.681279 4906 scope.go:117] "RemoveContainer" containerID="cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5" Feb 27 09:18:34 crc kubenswrapper[4906]: E0227 09:18:34.681916 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5\": container with ID starting with cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5 not found: ID does not exist" containerID="cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.681964 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5"} err="failed to get container status \"cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5\": rpc error: code = NotFound desc = could not find container \"cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5\": container with ID starting with cb06eec51ffa55529880c9fa7712cf88534716ebfd6af50c1cafe1ba95437cc5 not found: ID does not exist" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.681997 4906 scope.go:117] "RemoveContainer" containerID="c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c" Feb 27 09:18:34 crc kubenswrapper[4906]: E0227 09:18:34.682266 4906 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c\": container with ID starting with c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c not found: ID does not exist" containerID="c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c" Feb 27 09:18:34 crc kubenswrapper[4906]: I0227 09:18:34.682312 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c"} err="failed to get container status \"c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c\": rpc error: code = NotFound desc = could not find container \"c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c\": container with ID starting with c9365be65b2265a2cd0d8249f3c93c3ff67b92c7b2adc5daabe9b2014c02b04c not found: ID does not exist" Feb 27 09:18:36 crc kubenswrapper[4906]: I0227 09:18:36.579597 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" path="/var/lib/kubelet/pods/8723d5f6-40fd-4651-b8d4-825293a56289/volumes" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.603346 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6zg9v/must-gather-4b62r"] Feb 27 09:18:59 crc kubenswrapper[4906]: E0227 09:18:59.604375 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="extract-utilities" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.604394 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="extract-utilities" Feb 27 09:18:59 crc kubenswrapper[4906]: E0227 09:18:59.604424 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="extract-content" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.604434 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="extract-content" Feb 27 09:18:59 crc kubenswrapper[4906]: E0227 09:18:59.604460 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="registry-server" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.604469 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="registry-server" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.604710 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="8723d5f6-40fd-4651-b8d4-825293a56289" containerName="registry-server" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.605937 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.607719 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-6zg9v"/"default-dockercfg-f4xbl" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.608048 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6zg9v"/"kube-root-ca.crt" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.608551 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-6zg9v"/"openshift-service-ca.crt" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.625350 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6zg9v/must-gather-4b62r"] Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.730729 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/679afc7e-dcf2-409a-8b09-8af90de74b78-must-gather-output\") pod \"must-gather-4b62r\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.731084 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpvjn\" (UniqueName: \"kubernetes.io/projected/679afc7e-dcf2-409a-8b09-8af90de74b78-kube-api-access-qpvjn\") pod \"must-gather-4b62r\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.832442 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/679afc7e-dcf2-409a-8b09-8af90de74b78-must-gather-output\") pod \"must-gather-4b62r\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.832574 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpvjn\" (UniqueName: \"kubernetes.io/projected/679afc7e-dcf2-409a-8b09-8af90de74b78-kube-api-access-qpvjn\") pod \"must-gather-4b62r\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.833169 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/679afc7e-dcf2-409a-8b09-8af90de74b78-must-gather-output\") pod \"must-gather-4b62r\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.853781 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpvjn\" (UniqueName: \"kubernetes.io/projected/679afc7e-dcf2-409a-8b09-8af90de74b78-kube-api-access-qpvjn\") pod \"must-gather-4b62r\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:18:59 crc kubenswrapper[4906]: I0227 09:18:59.935037 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:19:00 crc kubenswrapper[4906]: I0227 09:19:00.360430 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-6zg9v/must-gather-4b62r"] Feb 27 09:19:00 crc kubenswrapper[4906]: I0227 09:19:00.835913 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/must-gather-4b62r" event={"ID":"679afc7e-dcf2-409a-8b09-8af90de74b78","Type":"ContainerStarted","Data":"91a26e9a47b5307e095aeef39193103f17ae145fbf40d439f9f7cdd31bc5c0e4"} Feb 27 09:19:05 crc kubenswrapper[4906]: I0227 09:19:05.426268 4906 scope.go:117] "RemoveContainer" containerID="52d9da2720ab9eb9a68c8309980f87887bd456fe8cf7d7575333bbfb1a4452bb" Feb 27 09:19:07 crc kubenswrapper[4906]: I0227 09:19:07.900580 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/must-gather-4b62r" event={"ID":"679afc7e-dcf2-409a-8b09-8af90de74b78","Type":"ContainerStarted","Data":"057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1"} Feb 27 09:19:08 crc kubenswrapper[4906]: I0227 09:19:08.919227 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/must-gather-4b62r" event={"ID":"679afc7e-dcf2-409a-8b09-8af90de74b78","Type":"ContainerStarted","Data":"30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec"} Feb 27 09:19:08 crc kubenswrapper[4906]: I0227 09:19:08.948140 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6zg9v/must-gather-4b62r" podStartSLOduration=2.899760088 podStartE2EDuration="9.948117152s" podCreationTimestamp="2026-02-27 09:18:59 +0000 UTC" firstStartedPulling="2026-02-27 09:19:00.372753272 +0000 UTC m=+3038.767154882" lastFinishedPulling="2026-02-27 09:19:07.421110326 +0000 UTC m=+3045.815511946" observedRunningTime="2026-02-27 09:19:08.941452545 +0000 UTC m=+3047.335854195" watchObservedRunningTime="2026-02-27 09:19:08.948117152 +0000 UTC m=+3047.342518772" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.464372 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6zg9v/crc-debug-vqjj4"] Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.466240 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.593983 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt8rb\" (UniqueName: \"kubernetes.io/projected/999caa7f-2231-416c-9981-3ce82b5d8d68-kube-api-access-vt8rb\") pod \"crc-debug-vqjj4\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.594072 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/999caa7f-2231-416c-9981-3ce82b5d8d68-host\") pod \"crc-debug-vqjj4\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.695671 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/999caa7f-2231-416c-9981-3ce82b5d8d68-host\") pod \"crc-debug-vqjj4\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.695839 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt8rb\" (UniqueName: \"kubernetes.io/projected/999caa7f-2231-416c-9981-3ce82b5d8d68-kube-api-access-vt8rb\") pod \"crc-debug-vqjj4\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.695844 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/999caa7f-2231-416c-9981-3ce82b5d8d68-host\") pod \"crc-debug-vqjj4\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.725199 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vt8rb\" (UniqueName: \"kubernetes.io/projected/999caa7f-2231-416c-9981-3ce82b5d8d68-kube-api-access-vt8rb\") pod \"crc-debug-vqjj4\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.785178 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:11 crc kubenswrapper[4906]: W0227 09:19:11.817069 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod999caa7f_2231_416c_9981_3ce82b5d8d68.slice/crio-e892bab0f3316a578b5f4b3e005ff029e3c95416225479671160b9835aa9c790 WatchSource:0}: Error finding container e892bab0f3316a578b5f4b3e005ff029e3c95416225479671160b9835aa9c790: Status 404 returned error can't find the container with id e892bab0f3316a578b5f4b3e005ff029e3c95416225479671160b9835aa9c790 Feb 27 09:19:11 crc kubenswrapper[4906]: I0227 09:19:11.955424 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" event={"ID":"999caa7f-2231-416c-9981-3ce82b5d8d68","Type":"ContainerStarted","Data":"e892bab0f3316a578b5f4b3e005ff029e3c95416225479671160b9835aa9c790"} Feb 27 09:19:24 crc kubenswrapper[4906]: I0227 09:19:24.077930 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" event={"ID":"999caa7f-2231-416c-9981-3ce82b5d8d68","Type":"ContainerStarted","Data":"554598311e09d31338cd5c3985712427fdb45c36de494f77a91f0998b33595fb"} Feb 27 09:19:24 crc kubenswrapper[4906]: I0227 09:19:24.097092 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" podStartSLOduration=1.117526875 podStartE2EDuration="13.097070286s" podCreationTimestamp="2026-02-27 09:19:11 +0000 UTC" firstStartedPulling="2026-02-27 09:19:11.820174404 +0000 UTC m=+3050.214576014" lastFinishedPulling="2026-02-27 09:19:23.799717815 +0000 UTC m=+3062.194119425" observedRunningTime="2026-02-27 09:19:24.091622871 +0000 UTC m=+3062.486024491" watchObservedRunningTime="2026-02-27 09:19:24.097070286 +0000 UTC m=+3062.491471896" Feb 27 09:19:39 crc kubenswrapper[4906]: I0227 09:19:39.296274 4906 generic.go:334] "Generic (PLEG): container finished" podID="999caa7f-2231-416c-9981-3ce82b5d8d68" containerID="554598311e09d31338cd5c3985712427fdb45c36de494f77a91f0998b33595fb" exitCode=0 Feb 27 09:19:39 crc kubenswrapper[4906]: I0227 09:19:39.296333 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" event={"ID":"999caa7f-2231-416c-9981-3ce82b5d8d68","Type":"ContainerDied","Data":"554598311e09d31338cd5c3985712427fdb45c36de494f77a91f0998b33595fb"} Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.449517 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.482779 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6zg9v/crc-debug-vqjj4"] Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.494090 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6zg9v/crc-debug-vqjj4"] Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.512065 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/999caa7f-2231-416c-9981-3ce82b5d8d68-host\") pod \"999caa7f-2231-416c-9981-3ce82b5d8d68\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.512135 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt8rb\" (UniqueName: \"kubernetes.io/projected/999caa7f-2231-416c-9981-3ce82b5d8d68-kube-api-access-vt8rb\") pod \"999caa7f-2231-416c-9981-3ce82b5d8d68\" (UID: \"999caa7f-2231-416c-9981-3ce82b5d8d68\") " Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.512178 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/999caa7f-2231-416c-9981-3ce82b5d8d68-host" (OuterVolumeSpecName: "host") pod "999caa7f-2231-416c-9981-3ce82b5d8d68" (UID: "999caa7f-2231-416c-9981-3ce82b5d8d68"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.512685 4906 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/999caa7f-2231-416c-9981-3ce82b5d8d68-host\") on node \"crc\" DevicePath \"\"" Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.527093 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/999caa7f-2231-416c-9981-3ce82b5d8d68-kube-api-access-vt8rb" (OuterVolumeSpecName: "kube-api-access-vt8rb") pod "999caa7f-2231-416c-9981-3ce82b5d8d68" (UID: "999caa7f-2231-416c-9981-3ce82b5d8d68"). InnerVolumeSpecName "kube-api-access-vt8rb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.562619 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="999caa7f-2231-416c-9981-3ce82b5d8d68" path="/var/lib/kubelet/pods/999caa7f-2231-416c-9981-3ce82b5d8d68/volumes" Feb 27 09:19:40 crc kubenswrapper[4906]: I0227 09:19:40.615163 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt8rb\" (UniqueName: \"kubernetes.io/projected/999caa7f-2231-416c-9981-3ce82b5d8d68-kube-api-access-vt8rb\") on node \"crc\" DevicePath \"\"" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.317183 4906 scope.go:117] "RemoveContainer" containerID="554598311e09d31338cd5c3985712427fdb45c36de494f77a91f0998b33595fb" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.317215 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-vqjj4" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.783026 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-6zg9v/crc-debug-swr76"] Feb 27 09:19:41 crc kubenswrapper[4906]: E0227 09:19:41.783833 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="999caa7f-2231-416c-9981-3ce82b5d8d68" containerName="container-00" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.783851 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="999caa7f-2231-416c-9981-3ce82b5d8d68" containerName="container-00" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.784104 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="999caa7f-2231-416c-9981-3ce82b5d8d68" containerName="container-00" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.785057 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.837762 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4kr7j\" (UniqueName: \"kubernetes.io/projected/f2117d52-9f20-4e45-84b3-8b53ac2113df-kube-api-access-4kr7j\") pod \"crc-debug-swr76\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.837814 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2117d52-9f20-4e45-84b3-8b53ac2113df-host\") pod \"crc-debug-swr76\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.940791 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4kr7j\" (UniqueName: \"kubernetes.io/projected/f2117d52-9f20-4e45-84b3-8b53ac2113df-kube-api-access-4kr7j\") pod \"crc-debug-swr76\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.940843 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2117d52-9f20-4e45-84b3-8b53ac2113df-host\") pod \"crc-debug-swr76\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.941029 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2117d52-9f20-4e45-84b3-8b53ac2113df-host\") pod \"crc-debug-swr76\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:41 crc kubenswrapper[4906]: I0227 09:19:41.963471 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4kr7j\" (UniqueName: \"kubernetes.io/projected/f2117d52-9f20-4e45-84b3-8b53ac2113df-kube-api-access-4kr7j\") pod \"crc-debug-swr76\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:42 crc kubenswrapper[4906]: I0227 09:19:42.108653 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:42 crc kubenswrapper[4906]: I0227 09:19:42.328839 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/crc-debug-swr76" event={"ID":"f2117d52-9f20-4e45-84b3-8b53ac2113df","Type":"ContainerStarted","Data":"4bd9c9daa62a9c14f195eb85654515fac962185525eef4236b949668ea2959e2"} Feb 27 09:19:43 crc kubenswrapper[4906]: I0227 09:19:43.340188 4906 generic.go:334] "Generic (PLEG): container finished" podID="f2117d52-9f20-4e45-84b3-8b53ac2113df" containerID="0a469d328cb88fad2113c712c4b4645cc734d6ef90bffd179b6ea0e00fadba75" exitCode=1 Feb 27 09:19:43 crc kubenswrapper[4906]: I0227 09:19:43.340240 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-6zg9v/crc-debug-swr76" event={"ID":"f2117d52-9f20-4e45-84b3-8b53ac2113df","Type":"ContainerDied","Data":"0a469d328cb88fad2113c712c4b4645cc734d6ef90bffd179b6ea0e00fadba75"} Feb 27 09:19:43 crc kubenswrapper[4906]: I0227 09:19:43.381359 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6zg9v/crc-debug-swr76"] Feb 27 09:19:43 crc kubenswrapper[4906]: I0227 09:19:43.391413 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6zg9v/crc-debug-swr76"] Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.469749 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.492094 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2117d52-9f20-4e45-84b3-8b53ac2113df-host\") pod \"f2117d52-9f20-4e45-84b3-8b53ac2113df\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.492222 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f2117d52-9f20-4e45-84b3-8b53ac2113df-host" (OuterVolumeSpecName: "host") pod "f2117d52-9f20-4e45-84b3-8b53ac2113df" (UID: "f2117d52-9f20-4e45-84b3-8b53ac2113df"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.492340 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4kr7j\" (UniqueName: \"kubernetes.io/projected/f2117d52-9f20-4e45-84b3-8b53ac2113df-kube-api-access-4kr7j\") pod \"f2117d52-9f20-4e45-84b3-8b53ac2113df\" (UID: \"f2117d52-9f20-4e45-84b3-8b53ac2113df\") " Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.492867 4906 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f2117d52-9f20-4e45-84b3-8b53ac2113df-host\") on node \"crc\" DevicePath \"\"" Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.498186 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2117d52-9f20-4e45-84b3-8b53ac2113df-kube-api-access-4kr7j" (OuterVolumeSpecName: "kube-api-access-4kr7j") pod "f2117d52-9f20-4e45-84b3-8b53ac2113df" (UID: "f2117d52-9f20-4e45-84b3-8b53ac2113df"). InnerVolumeSpecName "kube-api-access-4kr7j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.562503 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2117d52-9f20-4e45-84b3-8b53ac2113df" path="/var/lib/kubelet/pods/f2117d52-9f20-4e45-84b3-8b53ac2113df/volumes" Feb 27 09:19:44 crc kubenswrapper[4906]: I0227 09:19:44.593832 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4kr7j\" (UniqueName: \"kubernetes.io/projected/f2117d52-9f20-4e45-84b3-8b53ac2113df-kube-api-access-4kr7j\") on node \"crc\" DevicePath \"\"" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.156041 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2zmp7"] Feb 27 09:19:45 crc kubenswrapper[4906]: E0227 09:19:45.156536 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2117d52-9f20-4e45-84b3-8b53ac2113df" containerName="container-00" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.156558 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2117d52-9f20-4e45-84b3-8b53ac2113df" containerName="container-00" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.156823 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2117d52-9f20-4e45-84b3-8b53ac2113df" containerName="container-00" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.160021 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.168124 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2zmp7"] Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.201966 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrdcj\" (UniqueName: \"kubernetes.io/projected/fca59eb5-f6b8-4165-8068-e76ac6dd876e-kube-api-access-qrdcj\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.202231 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-catalog-content\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.202302 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-utilities\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.304807 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrdcj\" (UniqueName: \"kubernetes.io/projected/fca59eb5-f6b8-4165-8068-e76ac6dd876e-kube-api-access-qrdcj\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.305027 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-catalog-content\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.305099 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-utilities\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.305968 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-catalog-content\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.305995 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-utilities\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.327768 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrdcj\" (UniqueName: \"kubernetes.io/projected/fca59eb5-f6b8-4165-8068-e76ac6dd876e-kube-api-access-qrdcj\") pod \"certified-operators-2zmp7\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.377947 4906 scope.go:117] "RemoveContainer" containerID="0a469d328cb88fad2113c712c4b4645cc734d6ef90bffd179b6ea0e00fadba75" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.378147 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6zg9v/crc-debug-swr76" Feb 27 09:19:45 crc kubenswrapper[4906]: I0227 09:19:45.491652 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:46 crc kubenswrapper[4906]: W0227 09:19:46.065131 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfca59eb5_f6b8_4165_8068_e76ac6dd876e.slice/crio-d10cf2faeed8e99b4ddeb506b6113eca5280bfd7eec855708212549032d48afe WatchSource:0}: Error finding container d10cf2faeed8e99b4ddeb506b6113eca5280bfd7eec855708212549032d48afe: Status 404 returned error can't find the container with id d10cf2faeed8e99b4ddeb506b6113eca5280bfd7eec855708212549032d48afe Feb 27 09:19:46 crc kubenswrapper[4906]: I0227 09:19:46.066553 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2zmp7"] Feb 27 09:19:46 crc kubenswrapper[4906]: I0227 09:19:46.390662 4906 generic.go:334] "Generic (PLEG): container finished" podID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerID="e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574" exitCode=0 Feb 27 09:19:46 crc kubenswrapper[4906]: I0227 09:19:46.390710 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zmp7" event={"ID":"fca59eb5-f6b8-4165-8068-e76ac6dd876e","Type":"ContainerDied","Data":"e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574"} Feb 27 09:19:46 crc kubenswrapper[4906]: I0227 09:19:46.390734 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zmp7" event={"ID":"fca59eb5-f6b8-4165-8068-e76ac6dd876e","Type":"ContainerStarted","Data":"d10cf2faeed8e99b4ddeb506b6113eca5280bfd7eec855708212549032d48afe"} Feb 27 09:19:48 crc kubenswrapper[4906]: I0227 09:19:48.418959 4906 generic.go:334] "Generic (PLEG): container finished" podID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerID="768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815" exitCode=0 Feb 27 09:19:48 crc kubenswrapper[4906]: I0227 09:19:48.419084 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zmp7" event={"ID":"fca59eb5-f6b8-4165-8068-e76ac6dd876e","Type":"ContainerDied","Data":"768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815"} Feb 27 09:19:51 crc kubenswrapper[4906]: I0227 09:19:51.445537 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zmp7" event={"ID":"fca59eb5-f6b8-4165-8068-e76ac6dd876e","Type":"ContainerStarted","Data":"b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d"} Feb 27 09:19:51 crc kubenswrapper[4906]: I0227 09:19:51.470313 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2zmp7" podStartSLOduration=2.70711619 podStartE2EDuration="6.470291281s" podCreationTimestamp="2026-02-27 09:19:45 +0000 UTC" firstStartedPulling="2026-02-27 09:19:46.392581457 +0000 UTC m=+3084.786983067" lastFinishedPulling="2026-02-27 09:19:50.155756548 +0000 UTC m=+3088.550158158" observedRunningTime="2026-02-27 09:19:51.464720333 +0000 UTC m=+3089.859121953" watchObservedRunningTime="2026-02-27 09:19:51.470291281 +0000 UTC m=+3089.864692901" Feb 27 09:19:55 crc kubenswrapper[4906]: I0227 09:19:55.492153 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:55 crc kubenswrapper[4906]: I0227 09:19:55.492732 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:55 crc kubenswrapper[4906]: I0227 09:19:55.538454 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:56 crc kubenswrapper[4906]: I0227 09:19:56.548977 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:56 crc kubenswrapper[4906]: I0227 09:19:56.613003 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2zmp7"] Feb 27 09:19:58 crc kubenswrapper[4906]: I0227 09:19:58.497503 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2zmp7" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="registry-server" containerID="cri-o://b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d" gracePeriod=2 Feb 27 09:19:58 crc kubenswrapper[4906]: I0227 09:19:58.957407 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.092826 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-utilities\") pod \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.092893 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-catalog-content\") pod \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.092970 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrdcj\" (UniqueName: \"kubernetes.io/projected/fca59eb5-f6b8-4165-8068-e76ac6dd876e-kube-api-access-qrdcj\") pod \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\" (UID: \"fca59eb5-f6b8-4165-8068-e76ac6dd876e\") " Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.094066 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-utilities" (OuterVolumeSpecName: "utilities") pod "fca59eb5-f6b8-4165-8068-e76ac6dd876e" (UID: "fca59eb5-f6b8-4165-8068-e76ac6dd876e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.094465 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.105209 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fca59eb5-f6b8-4165-8068-e76ac6dd876e-kube-api-access-qrdcj" (OuterVolumeSpecName: "kube-api-access-qrdcj") pod "fca59eb5-f6b8-4165-8068-e76ac6dd876e" (UID: "fca59eb5-f6b8-4165-8068-e76ac6dd876e"). InnerVolumeSpecName "kube-api-access-qrdcj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.145045 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fca59eb5-f6b8-4165-8068-e76ac6dd876e" (UID: "fca59eb5-f6b8-4165-8068-e76ac6dd876e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.196644 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrdcj\" (UniqueName: \"kubernetes.io/projected/fca59eb5-f6b8-4165-8068-e76ac6dd876e-kube-api-access-qrdcj\") on node \"crc\" DevicePath \"\"" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.196675 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fca59eb5-f6b8-4165-8068-e76ac6dd876e-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.537431 4906 generic.go:334] "Generic (PLEG): container finished" podID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerID="b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d" exitCode=0 Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.537523 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zmp7" event={"ID":"fca59eb5-f6b8-4165-8068-e76ac6dd876e","Type":"ContainerDied","Data":"b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d"} Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.537574 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2zmp7" event={"ID":"fca59eb5-f6b8-4165-8068-e76ac6dd876e","Type":"ContainerDied","Data":"d10cf2faeed8e99b4ddeb506b6113eca5280bfd7eec855708212549032d48afe"} Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.537626 4906 scope.go:117] "RemoveContainer" containerID="b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.538022 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2zmp7" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.567397 4906 scope.go:117] "RemoveContainer" containerID="768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.603429 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2zmp7"] Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.610937 4906 scope.go:117] "RemoveContainer" containerID="e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.615607 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2zmp7"] Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.643424 4906 scope.go:117] "RemoveContainer" containerID="b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d" Feb 27 09:19:59 crc kubenswrapper[4906]: E0227 09:19:59.643985 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d\": container with ID starting with b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d not found: ID does not exist" containerID="b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.644027 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d"} err="failed to get container status \"b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d\": rpc error: code = NotFound desc = could not find container \"b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d\": container with ID starting with b8f489d13948162c561d6cf6ed04f235f9d3155f41c94ca1072f58b1390a180d not found: ID does not exist" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.644055 4906 scope.go:117] "RemoveContainer" containerID="768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815" Feb 27 09:19:59 crc kubenswrapper[4906]: E0227 09:19:59.645658 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815\": container with ID starting with 768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815 not found: ID does not exist" containerID="768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.645719 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815"} err="failed to get container status \"768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815\": rpc error: code = NotFound desc = could not find container \"768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815\": container with ID starting with 768ee84739a769391bfe88ad22528ecd1a222ffce433ad61292dbc9f4c9b9815 not found: ID does not exist" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.645763 4906 scope.go:117] "RemoveContainer" containerID="e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574" Feb 27 09:19:59 crc kubenswrapper[4906]: E0227 09:19:59.646284 4906 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574\": container with ID starting with e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574 not found: ID does not exist" containerID="e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574" Feb 27 09:19:59 crc kubenswrapper[4906]: I0227 09:19:59.646313 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574"} err="failed to get container status \"e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574\": rpc error: code = NotFound desc = could not find container \"e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574\": container with ID starting with e00c2a1ad74a9ecc6be2c1357ff1e9e5d7bb8076484fca58b8d6324ea40fe574 not found: ID does not exist" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.168776 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536400-dphzm"] Feb 27 09:20:00 crc kubenswrapper[4906]: E0227 09:20:00.169819 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="registry-server" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.169851 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="registry-server" Feb 27 09:20:00 crc kubenswrapper[4906]: E0227 09:20:00.169916 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="extract-content" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.169933 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="extract-content" Feb 27 09:20:00 crc kubenswrapper[4906]: E0227 09:20:00.169986 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="extract-utilities" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.169999 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="extract-utilities" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.170314 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" containerName="registry-server" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.171739 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536400-dphzm" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.177794 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.177827 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.178036 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.195404 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536400-dphzm"] Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.321931 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65hpc\" (UniqueName: \"kubernetes.io/projected/0f1e0390-8255-4e15-9615-64695bd30b6f-kube-api-access-65hpc\") pod \"auto-csr-approver-29536400-dphzm\" (UID: \"0f1e0390-8255-4e15-9615-64695bd30b6f\") " pod="openshift-infra/auto-csr-approver-29536400-dphzm" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.424300 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65hpc\" (UniqueName: \"kubernetes.io/projected/0f1e0390-8255-4e15-9615-64695bd30b6f-kube-api-access-65hpc\") pod \"auto-csr-approver-29536400-dphzm\" (UID: \"0f1e0390-8255-4e15-9615-64695bd30b6f\") " pod="openshift-infra/auto-csr-approver-29536400-dphzm" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.448604 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65hpc\" (UniqueName: \"kubernetes.io/projected/0f1e0390-8255-4e15-9615-64695bd30b6f-kube-api-access-65hpc\") pod \"auto-csr-approver-29536400-dphzm\" (UID: \"0f1e0390-8255-4e15-9615-64695bd30b6f\") " pod="openshift-infra/auto-csr-approver-29536400-dphzm" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.502457 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536400-dphzm" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.567743 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fca59eb5-f6b8-4165-8068-e76ac6dd876e" path="/var/lib/kubelet/pods/fca59eb5-f6b8-4165-8068-e76ac6dd876e/volumes" Feb 27 09:20:00 crc kubenswrapper[4906]: I0227 09:20:00.983327 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536400-dphzm"] Feb 27 09:20:01 crc kubenswrapper[4906]: I0227 09:20:01.562534 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536400-dphzm" event={"ID":"0f1e0390-8255-4e15-9615-64695bd30b6f","Type":"ContainerStarted","Data":"49091af4aa48d374b9f623af229b4f66f7a8d00c16b68818b37134c81c4b6550"} Feb 27 09:20:02 crc kubenswrapper[4906]: I0227 09:20:02.574326 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536400-dphzm" event={"ID":"0f1e0390-8255-4e15-9615-64695bd30b6f","Type":"ContainerStarted","Data":"ac3f72a786acd3982115012720e9fae5635f7e1c0d83001794b57fcc7455eeed"} Feb 27 09:20:02 crc kubenswrapper[4906]: I0227 09:20:02.599528 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536400-dphzm" podStartSLOduration=1.55919359 podStartE2EDuration="2.599507398s" podCreationTimestamp="2026-02-27 09:20:00 +0000 UTC" firstStartedPulling="2026-02-27 09:20:00.995183135 +0000 UTC m=+3099.389584765" lastFinishedPulling="2026-02-27 09:20:02.035496963 +0000 UTC m=+3100.429898573" observedRunningTime="2026-02-27 09:20:02.590106478 +0000 UTC m=+3100.984508088" watchObservedRunningTime="2026-02-27 09:20:02.599507398 +0000 UTC m=+3100.993909008" Feb 27 09:20:03 crc kubenswrapper[4906]: I0227 09:20:03.583526 4906 generic.go:334] "Generic (PLEG): container finished" podID="0f1e0390-8255-4e15-9615-64695bd30b6f" containerID="ac3f72a786acd3982115012720e9fae5635f7e1c0d83001794b57fcc7455eeed" exitCode=0 Feb 27 09:20:03 crc kubenswrapper[4906]: I0227 09:20:03.583771 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536400-dphzm" event={"ID":"0f1e0390-8255-4e15-9615-64695bd30b6f","Type":"ContainerDied","Data":"ac3f72a786acd3982115012720e9fae5635f7e1c0d83001794b57fcc7455eeed"} Feb 27 09:20:04 crc kubenswrapper[4906]: I0227 09:20:04.919402 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536400-dphzm" Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.014384 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65hpc\" (UniqueName: \"kubernetes.io/projected/0f1e0390-8255-4e15-9615-64695bd30b6f-kube-api-access-65hpc\") pod \"0f1e0390-8255-4e15-9615-64695bd30b6f\" (UID: \"0f1e0390-8255-4e15-9615-64695bd30b6f\") " Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.020097 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f1e0390-8255-4e15-9615-64695bd30b6f-kube-api-access-65hpc" (OuterVolumeSpecName: "kube-api-access-65hpc") pod "0f1e0390-8255-4e15-9615-64695bd30b6f" (UID: "0f1e0390-8255-4e15-9615-64695bd30b6f"). InnerVolumeSpecName "kube-api-access-65hpc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.116573 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65hpc\" (UniqueName: \"kubernetes.io/projected/0f1e0390-8255-4e15-9615-64695bd30b6f-kube-api-access-65hpc\") on node \"crc\" DevicePath \"\"" Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.600442 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536400-dphzm" event={"ID":"0f1e0390-8255-4e15-9615-64695bd30b6f","Type":"ContainerDied","Data":"49091af4aa48d374b9f623af229b4f66f7a8d00c16b68818b37134c81c4b6550"} Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.600483 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="49091af4aa48d374b9f623af229b4f66f7a8d00c16b68818b37134c81c4b6550" Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.600494 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536400-dphzm" Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.643546 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536394-p2kc4"] Feb 27 09:20:05 crc kubenswrapper[4906]: I0227 09:20:05.650930 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536394-p2kc4"] Feb 27 09:20:06 crc kubenswrapper[4906]: I0227 09:20:06.564338 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f43478c-4a21-41ef-a8c3-79580edca361" path="/var/lib/kubelet/pods/2f43478c-4a21-41ef-a8c3-79580edca361/volumes" Feb 27 09:20:07 crc kubenswrapper[4906]: I0227 09:20:07.409651 4906 scope.go:117] "RemoveContainer" containerID="c3ffb47e11d18829f3862b745ead451846356fefd9446c9e6a59a5ec3730745e" Feb 27 09:20:25 crc kubenswrapper[4906]: I0227 09:20:25.798652 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-79595755d6-s8v98_d0ff3fbe-cb4c-4337-96a2-b2b621691c91/barbican-api/0.log" Feb 27 09:20:25 crc kubenswrapper[4906]: I0227 09:20:25.999367 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-79595755d6-s8v98_d0ff3fbe-cb4c-4337-96a2-b2b621691c91/barbican-api-log/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.071465 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-688678b65d-rntkp_8ba40093-618b-4802-807d-91b1686f98c6/barbican-keystone-listener/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.128016 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-688678b65d-rntkp_8ba40093-618b-4802-807d-91b1686f98c6/barbican-keystone-listener-log/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.277002 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9bf8484c-xcvbs_3afec785-161c-4ca3-bc22-0c958826c2db/barbican-worker-log/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.289272 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-6c9bf8484c-xcvbs_3afec785-161c-4ca3-bc22-0c958826c2db/barbican-worker/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.500987 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-rw2f4_42f73c0e-3d0e-4ba2-aa05-c1547471b938/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.541689 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_385c1ece-0f96-4433-b8a4-8719a56f5697/ceilometer-central-agent/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.710298 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_385c1ece-0f96-4433-b8a4-8719a56f5697/proxy-httpd/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.717269 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_385c1ece-0f96-4433-b8a4-8719a56f5697/ceilometer-notification-agent/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.783999 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_385c1ece-0f96-4433-b8a4-8719a56f5697/sg-core/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.974735 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_00396512-9757-4e1d-b801-074ac259bab9/cinder-api-log/0.log" Feb 27 09:20:26 crc kubenswrapper[4906]: I0227 09:20:26.992738 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_00396512-9757-4e1d-b801-074ac259bab9/cinder-api/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.216393 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_16a6f839-050d-49e8-b788-3ff1f5e46329/probe/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.257325 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_16a6f839-050d-49e8-b788-3ff1f5e46329/cinder-scheduler/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.300820 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-t5f4n_e77dee0c-6c62-4257-b0cc-7c4befd35e69/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.536010 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-nbzpj_5988b580-0d6e-4c0f-9843-7088d1329575/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.579625 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6fb577f7d7-q2mns_4af634eb-0270-43d7-bd3a-20cbde94f1f9/init/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.748024 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6fb577f7d7-q2mns_4af634eb-0270-43d7-bd3a-20cbde94f1f9/init/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.872128 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-6fb577f7d7-q2mns_4af634eb-0270-43d7-bd3a-20cbde94f1f9/dnsmasq-dns/0.log" Feb 27 09:20:27 crc kubenswrapper[4906]: I0227 09:20:27.943125 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-ldxzj_e632f9dc-ad86-45fb-8fcb-a35d8a92b07a/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:28 crc kubenswrapper[4906]: I0227 09:20:28.125022 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_4503410a-77c1-4da7-b599-f6746affaaf8/glance-log/0.log" Feb 27 09:20:28 crc kubenswrapper[4906]: I0227 09:20:28.181050 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_4503410a-77c1-4da7-b599-f6746affaaf8/glance-httpd/0.log" Feb 27 09:20:28 crc kubenswrapper[4906]: I0227 09:20:28.638592 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38/glance-log/0.log" Feb 27 09:20:28 crc kubenswrapper[4906]: I0227 09:20:28.697201 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_e187aa1d-87cd-4e3f-8c7c-ef15fbc50b38/glance-httpd/0.log" Feb 27 09:20:28 crc kubenswrapper[4906]: I0227 09:20:28.968164 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7f78987f9b-lzmw8_6dc6534b-d5ec-4c53-bfc1-aae2389e3755/horizon/0.log" Feb 27 09:20:29 crc kubenswrapper[4906]: I0227 09:20:29.013988 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-kjpm4_cce4f075-8cb4-4f72-a590-56f3d507eebf/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:29 crc kubenswrapper[4906]: I0227 09:20:29.254123 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7f78987f9b-lzmw8_6dc6534b-d5ec-4c53-bfc1-aae2389e3755/horizon-log/0.log" Feb 27 09:20:29 crc kubenswrapper[4906]: I0227 09:20:29.255282 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-ff8w8_0d21edbc-e0a7-453e-81c7-ebb897fa20fb/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:29 crc kubenswrapper[4906]: I0227 09:20:29.554536 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-6d6854c956-6hqvk_6ca291d5-b655-4e18-8bc7-738194a54582/keystone-api/0.log" Feb 27 09:20:29 crc kubenswrapper[4906]: I0227 09:20:29.632287 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29536381-qq67w_358c1490-223c-4b95-87fb-279305744869/keystone-cron/0.log" Feb 27 09:20:29 crc kubenswrapper[4906]: I0227 09:20:29.889078 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_5f98d1e1-977f-4e20-86c4-e4e580c01f54/kube-state-metrics/0.log" Feb 27 09:20:29 crc kubenswrapper[4906]: I0227 09:20:29.975557 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-qpdm5_a0093a0e-b072-4131-a483-ffb3b8858f51/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:30 crc kubenswrapper[4906]: I0227 09:20:30.210413 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-796c549d8f-qpw49_1774e857-2c80-489f-8985-11398d1727be/neutron-api/0.log" Feb 27 09:20:30 crc kubenswrapper[4906]: I0227 09:20:30.225004 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-796c549d8f-qpw49_1774e857-2c80-489f-8985-11398d1727be/neutron-httpd/0.log" Feb 27 09:20:30 crc kubenswrapper[4906]: I0227 09:20:30.524260 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-4lhtd_2306aef3-5469-438c-a3fb-0a0b987c7372/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:30 crc kubenswrapper[4906]: I0227 09:20:30.832562 4906 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_nova-api-0_42ba6f7d-0f99-4b31-82bc-4366cec7c4a4/nova-api-log/0.log" Feb 27 09:20:30 crc kubenswrapper[4906]: I0227 09:20:30.861896 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_42ba6f7d-0f99-4b31-82bc-4366cec7c4a4/nova-api-api/0.log" Feb 27 09:20:30 crc kubenswrapper[4906]: I0227 09:20:30.970126 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_1fe8c767-11f8-4a04-aab7-940c1b55a7b5/nova-cell0-conductor-conductor/0.log" Feb 27 09:20:31 crc kubenswrapper[4906]: I0227 09:20:31.140328 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_389f79ec-91c1-40e2-9076-771f9eacb628/nova-cell1-conductor-conductor/0.log" Feb 27 09:20:31 crc kubenswrapper[4906]: I0227 09:20:31.304382 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_460d94e6-b4e6-4248-9191-a5930f468875/nova-cell1-novncproxy-novncproxy/0.log" Feb 27 09:20:31 crc kubenswrapper[4906]: I0227 09:20:31.397431 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-sqg9z_f9df5a61-7fb3-4eb6-adc5-75d074d56a0d/nova-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:31 crc kubenswrapper[4906]: I0227 09:20:31.642055 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a8173ca1-28e6-49da-a3f4-4b8ebaf3e551/nova-metadata-log/0.log" Feb 27 09:20:31 crc kubenswrapper[4906]: I0227 09:20:31.866352 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_3dfcdaf9-e14b-4fe9-a800-eb8998342762/nova-scheduler-scheduler/0.log" Feb 27 09:20:31 crc kubenswrapper[4906]: I0227 09:20:31.884124 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_960971c6-e3d1-458e-9991-91cbcbeb9d5e/mysql-bootstrap/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.098218 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_960971c6-e3d1-458e-9991-91cbcbeb9d5e/galera/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.101973 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_960971c6-e3d1-458e-9991-91cbcbeb9d5e/mysql-bootstrap/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.322983 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b/mysql-bootstrap/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.499456 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_a8173ca1-28e6-49da-a3f4-4b8ebaf3e551/nova-metadata-metadata/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.553101 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b/mysql-bootstrap/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.566209 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_47e4a6a6-edd3-4ef7-b558-024cb2ddcf4b/galera/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.723959 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_e4bf0f4a-8f16-4255-8d40-37826771ba47/openstackclient/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.784114 4906 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_ovn-controller-67lpp_bd97ec8d-9d8f-4817-9770-d5392d4f60df/ovn-controller/0.log" Feb 27 09:20:32 crc kubenswrapper[4906]: I0227 09:20:32.959366 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-7zlfd_03e70640-9aec-465b-9141-7944e2a7aeb1/openstack-network-exporter/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.024548 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h6fvw_8bff3f91-e831-4a15-a078-639483433b26/ovsdb-server-init/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.303283 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h6fvw_8bff3f91-e831-4a15-a078-639483433b26/ovsdb-server/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.324360 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h6fvw_8bff3f91-e831-4a15-a078-639483433b26/ovsdb-server-init/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.357961 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-h6fvw_8bff3f91-e831-4a15-a078-639483433b26/ovs-vswitchd/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.544699 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_24bec749-145e-48db-b5f8-ae7b57a5aaa3/openstack-network-exporter/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.565209 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-57m99_00a360db-bbc2-40f9-a12a-0b8af451cb3c/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.643310 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_24bec749-145e-48db-b5f8-ae7b57a5aaa3/ovn-northd/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.807822 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_19046133-de5e-4303-b576-cd539ee5d3ae/openstack-network-exporter/0.log" Feb 27 09:20:33 crc kubenswrapper[4906]: I0227 09:20:33.875458 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_19046133-de5e-4303-b576-cd539ee5d3ae/ovsdbserver-nb/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.029576 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cfe60c7c-9d0a-488c-bb9a-9a1a8511437f/openstack-network-exporter/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.109743 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_cfe60c7c-9d0a-488c-bb9a-9a1a8511437f/ovsdbserver-sb/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.216105 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59d7bdb8d4-m9nsb_822cf36a-2c4f-4f54-a927-0adbc66b230f/placement-api/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.300674 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-59d7bdb8d4-m9nsb_822cf36a-2c4f-4f54-a927-0adbc66b230f/placement-log/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.470211 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ca80b5ea-d488-457b-b5f7-1be76770223e/setup-container/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.700595 4906 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ca80b5ea-d488-457b-b5f7-1be76770223e/rabbitmq/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.722092 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_ca80b5ea-d488-457b-b5f7-1be76770223e/setup-container/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.734727 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_514e9bcd-1026-4d1c-a641-ce105057f1bf/setup-container/0.log" Feb 27 09:20:34 crc kubenswrapper[4906]: I0227 09:20:34.955962 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_514e9bcd-1026-4d1c-a641-ce105057f1bf/setup-container/0.log" Feb 27 09:20:35 crc kubenswrapper[4906]: I0227 09:20:35.042844 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_514e9bcd-1026-4d1c-a641-ce105057f1bf/rabbitmq/0.log" Feb 27 09:20:35 crc kubenswrapper[4906]: I0227 09:20:35.392129 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-nnr7s_e27eec9c-ffe0-409a-95c6-ebbf293c7a7d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:35 crc kubenswrapper[4906]: I0227 09:20:35.477160 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-p5bw8_f0a22a37-97f6-42ad-ba15-d6c8a352d831/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:35 crc kubenswrapper[4906]: I0227 09:20:35.721593 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-8ns9l_878d7d32-bc60-4edf-aa59-82548a53fe4c/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:35 crc kubenswrapper[4906]: I0227 09:20:35.763353 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-hzn5t_5f8dfe1f-95da-435f-aedf-7319de0cea38/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.001947 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-z2blf_4ac686ba-92d6-4672-bd2e-936f1e9d15ba/ssh-known-hosts-edpm-deployment/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.089192 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8d74fdd59-xdxbd_b9e7b71f-1494-49d7-9c59-a5de95c6f7a6/proxy-server/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.095584 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-8d74fdd59-xdxbd_b9e7b71f-1494-49d7-9c59-a5de95c6f7a6/proxy-httpd/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.356445 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-6jk9r_db586541-2471-4a37-a7b6-3c8f324a696b/swift-ring-rebalance/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.412566 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/account-auditor/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.469187 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/account-reaper/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.608925 4906 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/account-server/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.621964 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/account-replicator/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.622594 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/container-auditor/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.738498 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/container-replicator/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.844318 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/container-server/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.854289 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/container-updater/0.log" Feb 27 09:20:36 crc kubenswrapper[4906]: I0227 09:20:36.910171 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/object-auditor/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.069269 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/object-expirer/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.110774 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/object-replicator/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.112117 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/object-server/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.139890 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/object-updater/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.308922 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/rsync/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.376937 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_c98486bd-1325-4072-bce0-a28d38ecead2/swift-recon-cron/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.472595 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-jkswf_a02ebde9-1894-4df1-a904-7d898d684871/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.640511 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_006c37d3-09e7-4ee5-aedf-8e3ea8049043/tempest-tests-tempest-tests-runner/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.837898 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_81933748-225d-4873-a7ba-4c2a3e91d54a/test-operator-logs-container/0.log" Feb 27 09:20:37 crc kubenswrapper[4906]: I0227 09:20:37.936993 4906 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-tmzq7_953a02f5-56dc-4fe0-b20f-158522e6d7d9/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Feb 27 09:20:42 crc kubenswrapper[4906]: I0227 09:20:42.758261 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_e678f55d-5f6a-4ce7-92f3-5a7b87803830/memcached/0.log" Feb 27 09:20:54 crc kubenswrapper[4906]: I0227 09:20:54.844143 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:20:54 crc kubenswrapper[4906]: I0227 09:20:54.844704 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:21:02 crc kubenswrapper[4906]: I0227 09:21:02.881751 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n_bcec6a95-1051-45a5-9dc0-156dff89a709/util/0.log" Feb 27 09:21:03 crc kubenswrapper[4906]: I0227 09:21:03.117547 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n_bcec6a95-1051-45a5-9dc0-156dff89a709/util/0.log" Feb 27 09:21:03 crc kubenswrapper[4906]: I0227 09:21:03.120557 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n_bcec6a95-1051-45a5-9dc0-156dff89a709/pull/0.log" Feb 27 09:21:03 crc kubenswrapper[4906]: I0227 09:21:03.129724 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n_bcec6a95-1051-45a5-9dc0-156dff89a709/pull/0.log" Feb 27 09:21:03 crc kubenswrapper[4906]: I0227 09:21:03.326423 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n_bcec6a95-1051-45a5-9dc0-156dff89a709/util/0.log" Feb 27 09:21:03 crc kubenswrapper[4906]: I0227 09:21:03.333177 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n_bcec6a95-1051-45a5-9dc0-156dff89a709/pull/0.log" Feb 27 09:21:03 crc kubenswrapper[4906]: I0227 09:21:03.356556 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_599de16dbab77a1b92b2d12e8f9c6262bea91d2ad8883bf4d6efe56c02s7k7n_bcec6a95-1051-45a5-9dc0-156dff89a709/extract/0.log" Feb 27 09:21:03 crc kubenswrapper[4906]: I0227 09:21:03.775460 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-5d87c9d997-8q7nm_1bb840aa-a248-4f16-8b8e-2710d728a7f8/manager/0.log" Feb 27 09:21:04 crc kubenswrapper[4906]: I0227 09:21:04.300382 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-64db6967f8-bvglp_b80a0b4d-87b7-4185-94b6-4524d830f149/manager/0.log" Feb 27 09:21:04 crc kubenswrapper[4906]: I0227 09:21:04.498420 4906 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-cf99c678f-jn9zw_aba839b0-d1ee-454e-b138-4e3656ea150d/manager/0.log" Feb 27 09:21:04 crc kubenswrapper[4906]: I0227 09:21:04.767849 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-78bc7f9bd9-pvjd6_e438f213-61e7-4ce1-9d68-d14e4121ba26/manager/0.log" Feb 27 09:21:05 crc kubenswrapper[4906]: I0227 09:21:05.043324 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-55d77d7b5c-2t8hl_d22e292e-57c3-4fc3-8730-813b100aa442/manager/0.log" Feb 27 09:21:05 crc kubenswrapper[4906]: I0227 09:21:05.238604 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-545456dc4-5x4jc_349bcf77-0fc4-4048-a66d-696798c3a6d4/manager/0.log" Feb 27 09:21:05 crc kubenswrapper[4906]: I0227 09:21:05.343582 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-f7fcc58b9-xs4j4_fa05256a-5601-4ac3-873d-eb58bd232401/manager/0.log" Feb 27 09:21:05 crc kubenswrapper[4906]: I0227 09:21:05.573634 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-67d996989d-wfwll_5ff960a3-98d8-4d3f-9116-2a0785aefb2e/manager/0.log" Feb 27 09:21:05 crc kubenswrapper[4906]: I0227 09:21:05.626291 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-55ffd4876b-wtrsc_6d51bdfc-e48a-44ff-a56c-9400e320fa7f/manager/0.log" Feb 27 09:21:05 crc kubenswrapper[4906]: I0227 09:21:05.875378 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-556b8b874-fpnbv_a9932342-3a2d-4621-b4f1-048d92eef4c2/manager/0.log" Feb 27 09:21:06 crc kubenswrapper[4906]: I0227 09:21:06.069286 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-54688575f-5w6ls_5b1250ff-45df-43fc-a9fc-fa364b823c16/manager/0.log" Feb 27 09:21:06 crc kubenswrapper[4906]: I0227 09:21:06.294822 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-74b6b5dc96-lsm2z_ec68c3b0-bb17-4c88-a478-e13e49063c7f/manager/0.log" Feb 27 09:21:06 crc kubenswrapper[4906]: I0227 09:21:06.388657 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-5d86c7ddb7-5h7k9_c12e7f2b-60e4-4bb5-9b11-3ae935c649c2/manager/0.log" Feb 27 09:21:06 crc kubenswrapper[4906]: I0227 09:21:06.627436 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-7c6767dc9cfh77b_775eec2f-3f17-4413-b454-0248b5cb7817/manager/0.log" Feb 27 09:21:07 crc kubenswrapper[4906]: I0227 09:21:07.066865 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-bm4k8_f5ec0807-fc16-427f-99be-2cc63d0ceb23/registry-server/0.log" Feb 27 09:21:07 crc kubenswrapper[4906]: I0227 09:21:07.105080 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-5fb5699c68-bb7nb_05a6722c-3dc2-412e-aa57-9f5201e6987e/operator/0.log" Feb 27 09:21:07 crc kubenswrapper[4906]: I0227 09:21:07.403337 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-75684d597f-q59sz_ab4168c1-442a-4218-bd19-a0194e2b4e59/manager/0.log" Feb 27 09:21:07 crc kubenswrapper[4906]: I0227 09:21:07.558749 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-648564c9fc-p8w7q_39722076-5ed2-4e53-bb1d-d2a8bc73b825/manager/0.log" Feb 27 09:21:07 crc kubenswrapper[4906]: I0227 09:21:07.671628 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-pppgx_37c2354e-5123-4644-ac6e-416ab22ecde4/operator/0.log" Feb 27 09:21:07 crc kubenswrapper[4906]: I0227 09:21:07.982305 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-9b9ff9f4d-sdhtz_470d801d-7d83-4b70-ba47-f2d93ef9ebfc/manager/0.log" Feb 27 09:21:08 crc kubenswrapper[4906]: I0227 09:21:08.167172 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5fdb694969-28cxl_52d0c5f6-ad2b-460d-8a8d-a9f4bc3fed06/manager/0.log" Feb 27 09:21:08 crc kubenswrapper[4906]: I0227 09:21:08.211216 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-55b5ff4dbb-s9mn2_4b06e9fa-a6b4-4277-a4ea-d0724bc40002/manager/0.log" Feb 27 09:21:08 crc kubenswrapper[4906]: I0227 09:21:08.622504 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-bccc79885-p8gxp_dfcc34b3-13fc-4a7f-ab38-45744608591e/manager/0.log" Feb 27 09:21:08 crc kubenswrapper[4906]: I0227 09:21:08.647281 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-6dfddd8f7d-xcp5b_c9a5ef1b-f518-41c0-a30d-d0a5d2e3321c/manager/0.log" Feb 27 09:21:10 crc kubenswrapper[4906]: I0227 09:21:10.024904 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6db6876945-cq6ns_b54e196b-1f4b-4121-821f-a6751aef49ed/manager/0.log" Feb 27 09:21:24 crc kubenswrapper[4906]: I0227 09:21:24.844856 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:21:24 crc kubenswrapper[4906]: I0227 09:21:24.846013 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:21:28 crc kubenswrapper[4906]: I0227 09:21:28.476606 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-p78ck_d217f1ee-f917-4ac1-bf8d-c8a011d42ebc/control-plane-machine-set-operator/0.log" Feb 27 09:21:28 crc kubenswrapper[4906]: I0227 09:21:28.570949 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6lqtc_ea522559-4f3c-4d90-ae46-aa2a9f27b243/kube-rbac-proxy/0.log" Feb 27 09:21:28 crc kubenswrapper[4906]: I0227 09:21:28.603468 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6lqtc_ea522559-4f3c-4d90-ae46-aa2a9f27b243/machine-api-operator/0.log" Feb 27 09:21:41 crc kubenswrapper[4906]: I0227 09:21:41.665680 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-c6tfn_45013531-55f5-4b7c-88e3-71a927eaed69/cert-manager-controller/0.log" Feb 27 09:21:41 crc kubenswrapper[4906]: I0227 09:21:41.875864 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-fxldv_53526f83-4176-421a-9043-9a7839413714/cert-manager-cainjector/0.log" Feb 27 09:21:41 crc kubenswrapper[4906]: I0227 09:21:41.937450 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-q2shx_f98e1bf6-2c42-482c-9e2d-77f9fb0a572c/cert-manager-webhook/0.log" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.424970 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-5dcbbd79cf-jvbg5_8c34014e-ec30-40e8-ad99-e88f13beccfc/nmstate-console-plugin/0.log" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.610707 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-h644p_9029ee95-172c-4b7f-b240-e6f54a9f8c0a/nmstate-handler/0.log" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.620092 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-69594cc75-9xjr4_5f0d3058-8b6c-45aa-84de-416a2f458647/kube-rbac-proxy/0.log" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.696435 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-69594cc75-9xjr4_5f0d3058-8b6c-45aa-84de-416a2f458647/nmstate-metrics/0.log" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.800818 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-75c5dccd6c-xbzkr_9cfa03e5-ea58-4c27-b123-24d93614151e/nmstate-operator/0.log" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.843990 4906 patch_prober.go:28] interesting pod/machine-config-daemon-2s5wg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.844043 4906 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.844093 4906 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.845027 4906 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1"} pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.845080 4906 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerName="machine-config-daemon" containerID="cri-o://a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" gracePeriod=600 Feb 27 09:21:54 crc kubenswrapper[4906]: I0227 09:21:54.919059 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-786f45cff4-qvmtw_30e5e423-b09e-4ade-baa1-257731b5cc0b/nmstate-webhook/0.log" Feb 27 09:21:54 crc kubenswrapper[4906]: E0227 09:21:54.996364 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:21:55 crc kubenswrapper[4906]: I0227 09:21:55.653773 4906 generic.go:334] "Generic (PLEG): container finished" podID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" exitCode=0 Feb 27 09:21:55 crc kubenswrapper[4906]: I0227 09:21:55.653850 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerDied","Data":"a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1"} Feb 27 09:21:55 crc kubenswrapper[4906]: I0227 09:21:55.655170 4906 scope.go:117] "RemoveContainer" containerID="c0ecca24c3dfb89c4c60c4af6e708b2178e936671802de96e3da9b5747bcaeb0" Feb 27 09:21:55 crc kubenswrapper[4906]: I0227 09:21:55.655725 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:21:55 crc kubenswrapper[4906]: E0227 09:21:55.656143 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.164261 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536402-62zsc"] Feb 27 09:22:00 crc kubenswrapper[4906]: E0227 09:22:00.165661 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f1e0390-8255-4e15-9615-64695bd30b6f" containerName="oc" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.165681 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f1e0390-8255-4e15-9615-64695bd30b6f" containerName="oc" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.165934 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f1e0390-8255-4e15-9615-64695bd30b6f" containerName="oc" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.167043 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536402-62zsc" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.169567 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.171306 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.172247 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.178978 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536402-62zsc"] Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.262308 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2t7kf\" (UniqueName: \"kubernetes.io/projected/7fc749ea-eaa6-4163-90e5-c4898efb6235-kube-api-access-2t7kf\") pod \"auto-csr-approver-29536402-62zsc\" (UID: \"7fc749ea-eaa6-4163-90e5-c4898efb6235\") " pod="openshift-infra/auto-csr-approver-29536402-62zsc" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.364911 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2t7kf\" (UniqueName: \"kubernetes.io/projected/7fc749ea-eaa6-4163-90e5-c4898efb6235-kube-api-access-2t7kf\") pod \"auto-csr-approver-29536402-62zsc\" (UID: \"7fc749ea-eaa6-4163-90e5-c4898efb6235\") " pod="openshift-infra/auto-csr-approver-29536402-62zsc" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.395761 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2t7kf\" (UniqueName: \"kubernetes.io/projected/7fc749ea-eaa6-4163-90e5-c4898efb6235-kube-api-access-2t7kf\") pod \"auto-csr-approver-29536402-62zsc\" (UID: \"7fc749ea-eaa6-4163-90e5-c4898efb6235\") " pod="openshift-infra/auto-csr-approver-29536402-62zsc" Feb 27 09:22:00 crc kubenswrapper[4906]: I0227 09:22:00.493097 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536402-62zsc" Feb 27 09:22:01 crc kubenswrapper[4906]: I0227 09:22:01.035199 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536402-62zsc"] Feb 27 09:22:01 crc kubenswrapper[4906]: I0227 09:22:01.038443 4906 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 09:22:01 crc kubenswrapper[4906]: I0227 09:22:01.758530 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536402-62zsc" event={"ID":"7fc749ea-eaa6-4163-90e5-c4898efb6235","Type":"ContainerStarted","Data":"9e96c8f6c17dbbfcf481b81bc2aee385f7803799014d9919190a3308ea0097f6"} Feb 27 09:22:02 crc kubenswrapper[4906]: I0227 09:22:02.775311 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536402-62zsc" event={"ID":"7fc749ea-eaa6-4163-90e5-c4898efb6235","Type":"ContainerStarted","Data":"381b73eea4b338439d951c5ac8cf43ff78ebecfdd10885a09d22310b802f9e77"} Feb 27 09:22:03 crc kubenswrapper[4906]: I0227 09:22:03.792206 4906 generic.go:334] "Generic (PLEG): container finished" podID="7fc749ea-eaa6-4163-90e5-c4898efb6235" containerID="381b73eea4b338439d951c5ac8cf43ff78ebecfdd10885a09d22310b802f9e77" exitCode=0 Feb 27 09:22:03 crc kubenswrapper[4906]: I0227 09:22:03.792332 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536402-62zsc" event={"ID":"7fc749ea-eaa6-4163-90e5-c4898efb6235","Type":"ContainerDied","Data":"381b73eea4b338439d951c5ac8cf43ff78ebecfdd10885a09d22310b802f9e77"} Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.136500 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536402-62zsc" Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.188542 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2t7kf\" (UniqueName: \"kubernetes.io/projected/7fc749ea-eaa6-4163-90e5-c4898efb6235-kube-api-access-2t7kf\") pod \"7fc749ea-eaa6-4163-90e5-c4898efb6235\" (UID: \"7fc749ea-eaa6-4163-90e5-c4898efb6235\") " Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.198797 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7fc749ea-eaa6-4163-90e5-c4898efb6235-kube-api-access-2t7kf" (OuterVolumeSpecName: "kube-api-access-2t7kf") pod "7fc749ea-eaa6-4163-90e5-c4898efb6235" (UID: "7fc749ea-eaa6-4163-90e5-c4898efb6235"). InnerVolumeSpecName "kube-api-access-2t7kf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.290897 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2t7kf\" (UniqueName: \"kubernetes.io/projected/7fc749ea-eaa6-4163-90e5-c4898efb6235-kube-api-access-2t7kf\") on node \"crc\" DevicePath \"\"" Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.656841 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536396-z667p"] Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.664398 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536396-z667p"] Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.814818 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536402-62zsc" event={"ID":"7fc749ea-eaa6-4163-90e5-c4898efb6235","Type":"ContainerDied","Data":"9e96c8f6c17dbbfcf481b81bc2aee385f7803799014d9919190a3308ea0097f6"} Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.814904 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e96c8f6c17dbbfcf481b81bc2aee385f7803799014d9919190a3308ea0097f6" Feb 27 09:22:05 crc kubenswrapper[4906]: I0227 09:22:05.814980 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536402-62zsc" Feb 27 09:22:06 crc kubenswrapper[4906]: I0227 09:22:06.570664 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="894b2016-b019-41f6-81ea-5d12d397e99f" path="/var/lib/kubelet/pods/894b2016-b019-41f6-81ea-5d12d397e99f/volumes" Feb 27 09:22:07 crc kubenswrapper[4906]: I0227 09:22:07.607800 4906 scope.go:117] "RemoveContainer" containerID="b357b1f85a6f242e44ecad71d8e69bdc328a3bc611f51d50d4c4e4666e4106c3" Feb 27 09:22:10 crc kubenswrapper[4906]: I0227 09:22:10.556915 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:22:10 crc kubenswrapper[4906]: E0227 09:22:10.557522 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:22:21 crc kubenswrapper[4906]: I0227 09:22:21.553580 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:22:21 crc kubenswrapper[4906]: E0227 09:22:21.555159 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:22:24 crc kubenswrapper[4906]: I0227 09:22:24.617644 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-86ddb6bd46-qnp6d_a1f334d4-49ed-424f-9f86-6cc0ccfccca9/kube-rbac-proxy/0.log" Feb 27 09:22:24 crc kubenswrapper[4906]: I0227 09:22:24.737098 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_controller-86ddb6bd46-qnp6d_a1f334d4-49ed-424f-9f86-6cc0ccfccca9/controller/0.log" Feb 27 09:22:24 crc kubenswrapper[4906]: I0227 09:22:24.852323 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7f989f654f-p2vgn_ad499997-74c4-4c13-a9f3-1dec95a2a087/frr-k8s-webhook-server/0.log" Feb 27 09:22:24 crc kubenswrapper[4906]: I0227 09:22:24.935640 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-frr-files/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.091292 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-frr-files/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.110518 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-metrics/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.136189 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-reloader/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.141002 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-reloader/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.306013 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-frr-files/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.306121 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-metrics/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.327650 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-metrics/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.334188 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-reloader/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.484002 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-reloader/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.496987 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-frr-files/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.516913 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/cp-metrics/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.535278 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/controller/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.687540 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/frr-metrics/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.704743 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/kube-rbac-proxy/0.log" Feb 
27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.757167 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/kube-rbac-proxy-frr/0.log" Feb 27 09:22:25 crc kubenswrapper[4906]: I0227 09:22:25.973773 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/reloader/0.log" Feb 27 09:22:26 crc kubenswrapper[4906]: I0227 09:22:26.005930 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-697697b9b5-v5fgw_97639e95-0dfe-4ed7-8702-f0de909b3c09/manager/0.log" Feb 27 09:22:26 crc kubenswrapper[4906]: I0227 09:22:26.204987 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-c5d4d9978-tst7c_9db8c955-a52a-43c4-b61c-67e5ac2e3938/webhook-server/0.log" Feb 27 09:22:26 crc kubenswrapper[4906]: I0227 09:22:26.359063 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nqccr_e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf/kube-rbac-proxy/0.log" Feb 27 09:22:26 crc kubenswrapper[4906]: I0227 09:22:26.951902 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-nqccr_e1e76a6c-9ad0-41cd-9ed3-a1232c9674cf/speaker/0.log" Feb 27 09:22:27 crc kubenswrapper[4906]: I0227 09:22:27.285921 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-zws26_9c195ff8-f64c-4827-8826-dde8f2583e40/frr/0.log" Feb 27 09:22:36 crc kubenswrapper[4906]: I0227 09:22:36.558341 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:22:36 crc kubenswrapper[4906]: E0227 09:22:36.559024 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:22:39 crc kubenswrapper[4906]: I0227 09:22:39.482340 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg_4be2276e-319a-41b2-afe6-40b807bb398a/util/0.log" Feb 27 09:22:39 crc kubenswrapper[4906]: I0227 09:22:39.667494 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg_4be2276e-319a-41b2-afe6-40b807bb398a/util/0.log" Feb 27 09:22:39 crc kubenswrapper[4906]: I0227 09:22:39.704654 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg_4be2276e-319a-41b2-afe6-40b807bb398a/pull/0.log" Feb 27 09:22:39 crc kubenswrapper[4906]: I0227 09:22:39.704899 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg_4be2276e-319a-41b2-afe6-40b807bb398a/pull/0.log" Feb 27 09:22:39 crc kubenswrapper[4906]: I0227 09:22:39.936671 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg_4be2276e-319a-41b2-afe6-40b807bb398a/pull/0.log" Feb 27 09:22:39 crc 
kubenswrapper[4906]: I0227 09:22:39.954679 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg_4be2276e-319a-41b2-afe6-40b807bb398a/util/0.log" Feb 27 09:22:39 crc kubenswrapper[4906]: I0227 09:22:39.955271 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_0e94e7566f739476ccec6d16e58de3f1c434cfa3060893f90f3e473a82mxvfg_4be2276e-319a-41b2-afe6-40b807bb398a/extract/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.121768 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bqtn_3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3/extract-utilities/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.286859 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bqtn_3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3/extract-utilities/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.309663 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bqtn_3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3/extract-content/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.314031 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bqtn_3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3/extract-content/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.476835 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bqtn_3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3/extract-utilities/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.539465 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bqtn_3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3/extract-content/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.698368 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nzkhk_104e053c-89a4-4b45-b02f-26a3d6b0191c/extract-utilities/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.938756 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nzkhk_104e053c-89a4-4b45-b02f-26a3d6b0191c/extract-content/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.989275 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-6bqtn_3b9ee3cd-1efc-4df3-932e-27cc3ffee6e3/registry-server/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.989624 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nzkhk_104e053c-89a4-4b45-b02f-26a3d6b0191c/extract-utilities/0.log" Feb 27 09:22:40 crc kubenswrapper[4906]: I0227 09:22:40.994193 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nzkhk_104e053c-89a4-4b45-b02f-26a3d6b0191c/extract-content/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.128046 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nzkhk_104e053c-89a4-4b45-b02f-26a3d6b0191c/extract-utilities/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.169035 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-nzkhk_104e053c-89a4-4b45-b02f-26a3d6b0191c/extract-content/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.330215 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6_1c0d5365-d44e-4f3c-ac5e-93a61d9f4272/util/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.494374 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6_1c0d5365-d44e-4f3c-ac5e-93a61d9f4272/util/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.534678 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6_1c0d5365-d44e-4f3c-ac5e-93a61d9f4272/pull/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.588234 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6_1c0d5365-d44e-4f3c-ac5e-93a61d9f4272/pull/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.744809 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nzkhk_104e053c-89a4-4b45-b02f-26a3d6b0191c/registry-server/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.770123 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6_1c0d5365-d44e-4f3c-ac5e-93a61d9f4272/pull/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.835481 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6_1c0d5365-d44e-4f3c-ac5e-93a61d9f4272/extract/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.837465 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_d146760600e43041070ad4572d9c23f31a62e3aefc01a54998863bc5f4v47h6_1c0d5365-d44e-4f3c-ac5e-93a61d9f4272/util/0.log" Feb 27 09:22:41 crc kubenswrapper[4906]: I0227 09:22:41.989444 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-4cb5p_93a85195-01f3-43e1-9a7e-7603a41b47a4/marketplace-operator/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.028801 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w5gxv_0eebf875-9038-4025-bc93-6d759229f64c/extract-utilities/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.212513 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w5gxv_0eebf875-9038-4025-bc93-6d759229f64c/extract-content/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.224920 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w5gxv_0eebf875-9038-4025-bc93-6d759229f64c/extract-content/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.233695 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w5gxv_0eebf875-9038-4025-bc93-6d759229f64c/extract-utilities/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.439239 4906 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-w5gxv_0eebf875-9038-4025-bc93-6d759229f64c/extract-content/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.456203 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w5gxv_0eebf875-9038-4025-bc93-6d759229f64c/extract-utilities/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.577591 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-w5gxv_0eebf875-9038-4025-bc93-6d759229f64c/registry-server/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.671311 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jsxjh_3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb/extract-utilities/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.847228 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jsxjh_3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb/extract-utilities/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.854223 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jsxjh_3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb/extract-content/0.log" Feb 27 09:22:42 crc kubenswrapper[4906]: I0227 09:22:42.867605 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jsxjh_3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb/extract-content/0.log" Feb 27 09:22:43 crc kubenswrapper[4906]: I0227 09:22:43.065132 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jsxjh_3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb/extract-content/0.log" Feb 27 09:22:43 crc kubenswrapper[4906]: I0227 09:22:43.091099 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jsxjh_3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb/extract-utilities/0.log" Feb 27 09:22:43 crc kubenswrapper[4906]: I0227 09:22:43.480770 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-jsxjh_3a626b5a-ab13-4bb5-9f57-9deb8bfc27bb/registry-server/0.log" Feb 27 09:22:48 crc kubenswrapper[4906]: I0227 09:22:48.552400 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:22:48 crc kubenswrapper[4906]: E0227 09:22:48.553011 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:23:01 crc kubenswrapper[4906]: I0227 09:23:01.552289 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:23:01 crc kubenswrapper[4906]: E0227 09:23:01.553152 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" 
podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:23:15 crc kubenswrapper[4906]: I0227 09:23:15.552369 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:23:15 crc kubenswrapper[4906]: E0227 09:23:15.553074 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:23:29 crc kubenswrapper[4906]: I0227 09:23:29.552465 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:23:29 crc kubenswrapper[4906]: E0227 09:23:29.553218 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:23:41 crc kubenswrapper[4906]: I0227 09:23:41.561951 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:23:41 crc kubenswrapper[4906]: E0227 09:23:41.562854 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:23:55 crc kubenswrapper[4906]: I0227 09:23:55.552453 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:23:55 crc kubenswrapper[4906]: E0227 09:23:55.553057 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.209662 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536404-zm995"] Feb 27 09:24:00 crc kubenswrapper[4906]: E0227 09:24:00.210598 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7fc749ea-eaa6-4163-90e5-c4898efb6235" containerName="oc" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.210613 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="7fc749ea-eaa6-4163-90e5-c4898efb6235" containerName="oc" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.210799 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="7fc749ea-eaa6-4163-90e5-c4898efb6235" containerName="oc" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.211404 4906 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536404-zm995" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.213930 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.214546 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.214538 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.219924 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536404-zm995"] Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.282529 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwwzt\" (UniqueName: \"kubernetes.io/projected/41c7cdde-6325-4808-8471-ddc85be0fb33-kube-api-access-fwwzt\") pod \"auto-csr-approver-29536404-zm995\" (UID: \"41c7cdde-6325-4808-8471-ddc85be0fb33\") " pod="openshift-infra/auto-csr-approver-29536404-zm995" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.387448 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwwzt\" (UniqueName: \"kubernetes.io/projected/41c7cdde-6325-4808-8471-ddc85be0fb33-kube-api-access-fwwzt\") pod \"auto-csr-approver-29536404-zm995\" (UID: \"41c7cdde-6325-4808-8471-ddc85be0fb33\") " pod="openshift-infra/auto-csr-approver-29536404-zm995" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.412213 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwwzt\" (UniqueName: \"kubernetes.io/projected/41c7cdde-6325-4808-8471-ddc85be0fb33-kube-api-access-fwwzt\") pod \"auto-csr-approver-29536404-zm995\" (UID: \"41c7cdde-6325-4808-8471-ddc85be0fb33\") " pod="openshift-infra/auto-csr-approver-29536404-zm995" Feb 27 09:24:00 crc kubenswrapper[4906]: I0227 09:24:00.541839 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536404-zm995" Feb 27 09:24:01 crc kubenswrapper[4906]: I0227 09:24:01.007729 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536404-zm995"] Feb 27 09:24:01 crc kubenswrapper[4906]: W0227 09:24:01.014666 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41c7cdde_6325_4808_8471_ddc85be0fb33.slice/crio-b92916834af3dfb5f6bd41d0edf5b898bc7a7cb6995a1167a9399f37cdb29450 WatchSource:0}: Error finding container b92916834af3dfb5f6bd41d0edf5b898bc7a7cb6995a1167a9399f37cdb29450: Status 404 returned error can't find the container with id b92916834af3dfb5f6bd41d0edf5b898bc7a7cb6995a1167a9399f37cdb29450 Feb 27 09:24:01 crc kubenswrapper[4906]: I0227 09:24:01.932632 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536404-zm995" event={"ID":"41c7cdde-6325-4808-8471-ddc85be0fb33","Type":"ContainerStarted","Data":"b92916834af3dfb5f6bd41d0edf5b898bc7a7cb6995a1167a9399f37cdb29450"} Feb 27 09:24:02 crc kubenswrapper[4906]: I0227 09:24:02.941106 4906 generic.go:334] "Generic (PLEG): container finished" podID="41c7cdde-6325-4808-8471-ddc85be0fb33" containerID="9b1027fe03b63d66ba88f7112c8a1caee8eca10fddf65f56ff332c983ff65c2a" exitCode=0 Feb 27 09:24:02 crc kubenswrapper[4906]: I0227 09:24:02.941166 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536404-zm995" event={"ID":"41c7cdde-6325-4808-8471-ddc85be0fb33","Type":"ContainerDied","Data":"9b1027fe03b63d66ba88f7112c8a1caee8eca10fddf65f56ff332c983ff65c2a"} Feb 27 09:24:04 crc kubenswrapper[4906]: I0227 09:24:04.350577 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536404-zm995" Feb 27 09:24:04 crc kubenswrapper[4906]: I0227 09:24:04.479148 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwwzt\" (UniqueName: \"kubernetes.io/projected/41c7cdde-6325-4808-8471-ddc85be0fb33-kube-api-access-fwwzt\") pod \"41c7cdde-6325-4808-8471-ddc85be0fb33\" (UID: \"41c7cdde-6325-4808-8471-ddc85be0fb33\") " Feb 27 09:24:04 crc kubenswrapper[4906]: I0227 09:24:04.484972 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/41c7cdde-6325-4808-8471-ddc85be0fb33-kube-api-access-fwwzt" (OuterVolumeSpecName: "kube-api-access-fwwzt") pod "41c7cdde-6325-4808-8471-ddc85be0fb33" (UID: "41c7cdde-6325-4808-8471-ddc85be0fb33"). InnerVolumeSpecName "kube-api-access-fwwzt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:24:04 crc kubenswrapper[4906]: I0227 09:24:04.582010 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwwzt\" (UniqueName: \"kubernetes.io/projected/41c7cdde-6325-4808-8471-ddc85be0fb33-kube-api-access-fwwzt\") on node \"crc\" DevicePath \"\"" Feb 27 09:24:04 crc kubenswrapper[4906]: I0227 09:24:04.958846 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536404-zm995" event={"ID":"41c7cdde-6325-4808-8471-ddc85be0fb33","Type":"ContainerDied","Data":"b92916834af3dfb5f6bd41d0edf5b898bc7a7cb6995a1167a9399f37cdb29450"} Feb 27 09:24:04 crc kubenswrapper[4906]: I0227 09:24:04.959258 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b92916834af3dfb5f6bd41d0edf5b898bc7a7cb6995a1167a9399f37cdb29450" Feb 27 09:24:04 crc kubenswrapper[4906]: I0227 09:24:04.959326 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536404-zm995" Feb 27 09:24:05 crc kubenswrapper[4906]: I0227 09:24:05.450636 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536398-bhq4t"] Feb 27 09:24:05 crc kubenswrapper[4906]: I0227 09:24:05.462910 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536398-bhq4t"] Feb 27 09:24:06 crc kubenswrapper[4906]: I0227 09:24:06.551731 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:24:06 crc kubenswrapper[4906]: E0227 09:24:06.552070 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:24:06 crc kubenswrapper[4906]: I0227 09:24:06.562847 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ec8a8ad7-ef55-4952-82ca-1e9b76e3016b" path="/var/lib/kubelet/pods/ec8a8ad7-ef55-4952-82ca-1e9b76e3016b/volumes" Feb 27 09:24:07 crc kubenswrapper[4906]: I0227 09:24:07.732806 4906 scope.go:117] "RemoveContainer" containerID="b57514a561900453566b9ebb8b5cd14eb3df055bb8f541e4fb34d0768ebc3ab7" Feb 27 09:24:19 crc kubenswrapper[4906]: I0227 09:24:19.552410 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:24:19 crc kubenswrapper[4906]: E0227 09:24:19.556318 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:24:26 crc kubenswrapper[4906]: I0227 09:24:26.185666 4906 generic.go:334] "Generic (PLEG): container finished" podID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerID="057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1" exitCode=0 Feb 27 09:24:26 crc kubenswrapper[4906]: I0227 09:24:26.185971 4906 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-must-gather-6zg9v/must-gather-4b62r" event={"ID":"679afc7e-dcf2-409a-8b09-8af90de74b78","Type":"ContainerDied","Data":"057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1"} Feb 27 09:24:26 crc kubenswrapper[4906]: I0227 09:24:26.187553 4906 scope.go:117] "RemoveContainer" containerID="057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1" Feb 27 09:24:27 crc kubenswrapper[4906]: I0227 09:24:27.235940 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6zg9v_must-gather-4b62r_679afc7e-dcf2-409a-8b09-8af90de74b78/gather/0.log" Feb 27 09:24:31 crc kubenswrapper[4906]: I0227 09:24:31.551767 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:24:31 crc kubenswrapper[4906]: E0227 09:24:31.552597 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:24:34 crc kubenswrapper[4906]: I0227 09:24:34.709134 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-6zg9v/must-gather-4b62r"] Feb 27 09:24:34 crc kubenswrapper[4906]: I0227 09:24:34.710198 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-6zg9v/must-gather-4b62r" podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerName="copy" containerID="cri-o://30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec" gracePeriod=2 Feb 27 09:24:34 crc kubenswrapper[4906]: I0227 09:24:34.725317 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-6zg9v/must-gather-4b62r"] Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.135681 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6zg9v_must-gather-4b62r_679afc7e-dcf2-409a-8b09-8af90de74b78/copy/0.log" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.137135 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.272080 4906 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-6zg9v_must-gather-4b62r_679afc7e-dcf2-409a-8b09-8af90de74b78/copy/0.log" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.272834 4906 generic.go:334] "Generic (PLEG): container finished" podID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerID="30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec" exitCode=143 Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.272931 4906 scope.go:117] "RemoveContainer" containerID="30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.272931 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-6zg9v/must-gather-4b62r" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.283634 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qpvjn\" (UniqueName: \"kubernetes.io/projected/679afc7e-dcf2-409a-8b09-8af90de74b78-kube-api-access-qpvjn\") pod \"679afc7e-dcf2-409a-8b09-8af90de74b78\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.284123 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/679afc7e-dcf2-409a-8b09-8af90de74b78-must-gather-output\") pod \"679afc7e-dcf2-409a-8b09-8af90de74b78\" (UID: \"679afc7e-dcf2-409a-8b09-8af90de74b78\") " Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.290691 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/679afc7e-dcf2-409a-8b09-8af90de74b78-kube-api-access-qpvjn" (OuterVolumeSpecName: "kube-api-access-qpvjn") pod "679afc7e-dcf2-409a-8b09-8af90de74b78" (UID: "679afc7e-dcf2-409a-8b09-8af90de74b78"). InnerVolumeSpecName "kube-api-access-qpvjn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.307341 4906 scope.go:117] "RemoveContainer" containerID="057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.387950 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qpvjn\" (UniqueName: \"kubernetes.io/projected/679afc7e-dcf2-409a-8b09-8af90de74b78-kube-api-access-qpvjn\") on node \"crc\" DevicePath \"\"" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.421079 4906 scope.go:117] "RemoveContainer" containerID="30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec" Feb 27 09:24:35 crc kubenswrapper[4906]: E0227 09:24:35.421724 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec\": container with ID starting with 30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec not found: ID does not exist" containerID="30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.421776 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec"} err="failed to get container status \"30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec\": rpc error: code = NotFound desc = could not find container \"30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec\": container with ID starting with 30d354684b73a93a5f0406365c55c0a364cdcb4da8dfece5f69988d9717bb6ec not found: ID does not exist" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.421815 4906 scope.go:117] "RemoveContainer" containerID="057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1" Feb 27 09:24:35 crc kubenswrapper[4906]: E0227 09:24:35.422262 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1\": container with ID starting with 057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1 not found: ID does not exist" 
containerID="057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.422346 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1"} err="failed to get container status \"057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1\": rpc error: code = NotFound desc = could not find container \"057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1\": container with ID starting with 057de8cfcc67191e3cf82492708cd774bb50fccaeb45149cf3f74be3e12611b1 not found: ID does not exist" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.458287 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/679afc7e-dcf2-409a-8b09-8af90de74b78-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "679afc7e-dcf2-409a-8b09-8af90de74b78" (UID: "679afc7e-dcf2-409a-8b09-8af90de74b78"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:24:35 crc kubenswrapper[4906]: I0227 09:24:35.489771 4906 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/679afc7e-dcf2-409a-8b09-8af90de74b78-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 27 09:24:36 crc kubenswrapper[4906]: I0227 09:24:36.566156 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" path="/var/lib/kubelet/pods/679afc7e-dcf2-409a-8b09-8af90de74b78/volumes" Feb 27 09:24:43 crc kubenswrapper[4906]: I0227 09:24:43.551800 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:24:43 crc kubenswrapper[4906]: E0227 09:24:43.552761 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.290077 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-7jkm4"] Feb 27 09:24:44 crc kubenswrapper[4906]: E0227 09:24:44.290639 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerName="gather" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.290663 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerName="gather" Feb 27 09:24:44 crc kubenswrapper[4906]: E0227 09:24:44.290701 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="41c7cdde-6325-4808-8471-ddc85be0fb33" containerName="oc" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.290712 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="41c7cdde-6325-4808-8471-ddc85be0fb33" containerName="oc" Feb 27 09:24:44 crc kubenswrapper[4906]: E0227 09:24:44.290743 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerName="copy" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.290757 4906 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerName="copy" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.291076 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="41c7cdde-6325-4808-8471-ddc85be0fb33" containerName="oc" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.291101 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerName="gather" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.291133 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="679afc7e-dcf2-409a-8b09-8af90de74b78" containerName="copy" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.293401 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.340403 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7jkm4"] Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.369350 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-catalog-content\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.369464 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qndf\" (UniqueName: \"kubernetes.io/projected/fa75840f-7fe3-487d-93e6-7af78d580b00-kube-api-access-2qndf\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.369600 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-utilities\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.471356 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qndf\" (UniqueName: \"kubernetes.io/projected/fa75840f-7fe3-487d-93e6-7af78d580b00-kube-api-access-2qndf\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.471488 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-utilities\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.471586 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-catalog-content\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.472182 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-catalog-content\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.472705 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-utilities\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.496591 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qndf\" (UniqueName: \"kubernetes.io/projected/fa75840f-7fe3-487d-93e6-7af78d580b00-kube-api-access-2qndf\") pod \"redhat-operators-7jkm4\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:44 crc kubenswrapper[4906]: I0227 09:24:44.638868 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:45 crc kubenswrapper[4906]: I0227 09:24:45.129064 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-7jkm4"] Feb 27 09:24:45 crc kubenswrapper[4906]: I0227 09:24:45.403623 4906 generic.go:334] "Generic (PLEG): container finished" podID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerID="91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612" exitCode=0 Feb 27 09:24:45 crc kubenswrapper[4906]: I0227 09:24:45.403724 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jkm4" event={"ID":"fa75840f-7fe3-487d-93e6-7af78d580b00","Type":"ContainerDied","Data":"91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612"} Feb 27 09:24:45 crc kubenswrapper[4906]: I0227 09:24:45.404022 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jkm4" event={"ID":"fa75840f-7fe3-487d-93e6-7af78d580b00","Type":"ContainerStarted","Data":"bf6c8b2c107af777b11a401d67d6f21dd8cf166690ad326681dedd14e1af6dc8"} Feb 27 09:24:46 crc kubenswrapper[4906]: I0227 09:24:46.419698 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jkm4" event={"ID":"fa75840f-7fe3-487d-93e6-7af78d580b00","Type":"ContainerStarted","Data":"3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec"} Feb 27 09:24:50 crc kubenswrapper[4906]: I0227 09:24:50.469558 4906 generic.go:334] "Generic (PLEG): container finished" podID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerID="3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec" exitCode=0 Feb 27 09:24:50 crc kubenswrapper[4906]: I0227 09:24:50.469609 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jkm4" event={"ID":"fa75840f-7fe3-487d-93e6-7af78d580b00","Type":"ContainerDied","Data":"3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec"} Feb 27 09:24:51 crc kubenswrapper[4906]: I0227 09:24:51.503959 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jkm4" event={"ID":"fa75840f-7fe3-487d-93e6-7af78d580b00","Type":"ContainerStarted","Data":"4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3"} Feb 27 09:24:51 crc kubenswrapper[4906]: I0227 09:24:51.534259 4906 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-7jkm4" podStartSLOduration=2.069295243 podStartE2EDuration="7.534236501s" podCreationTimestamp="2026-02-27 09:24:44 +0000 UTC" firstStartedPulling="2026-02-27 09:24:45.405561744 +0000 UTC m=+3383.799963354" lastFinishedPulling="2026-02-27 09:24:50.870502982 +0000 UTC m=+3389.264904612" observedRunningTime="2026-02-27 09:24:51.524539633 +0000 UTC m=+3389.918941243" watchObservedRunningTime="2026-02-27 09:24:51.534236501 +0000 UTC m=+3389.928638111" Feb 27 09:24:54 crc kubenswrapper[4906]: I0227 09:24:54.639721 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:54 crc kubenswrapper[4906]: I0227 09:24:54.640125 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:24:55 crc kubenswrapper[4906]: I0227 09:24:55.704561 4906 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-7jkm4" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="registry-server" probeResult="failure" output=< Feb 27 09:24:55 crc kubenswrapper[4906]: timeout: failed to connect service ":50051" within 1s Feb 27 09:24:55 crc kubenswrapper[4906]: > Feb 27 09:24:56 crc kubenswrapper[4906]: I0227 09:24:56.552255 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:24:56 crc kubenswrapper[4906]: E0227 09:24:56.552533 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:25:04 crc kubenswrapper[4906]: I0227 09:25:04.707626 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:25:04 crc kubenswrapper[4906]: I0227 09:25:04.777232 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:25:04 crc kubenswrapper[4906]: I0227 09:25:04.950340 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7jkm4"] Feb 27 09:25:06 crc kubenswrapper[4906]: I0227 09:25:06.668042 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-7jkm4" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="registry-server" containerID="cri-o://4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3" gracePeriod=2 Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.178364 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.315992 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-utilities\") pod \"fa75840f-7fe3-487d-93e6-7af78d580b00\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.316077 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-catalog-content\") pod \"fa75840f-7fe3-487d-93e6-7af78d580b00\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.316310 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qndf\" (UniqueName: \"kubernetes.io/projected/fa75840f-7fe3-487d-93e6-7af78d580b00-kube-api-access-2qndf\") pod \"fa75840f-7fe3-487d-93e6-7af78d580b00\" (UID: \"fa75840f-7fe3-487d-93e6-7af78d580b00\") " Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.318312 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-utilities" (OuterVolumeSpecName: "utilities") pod "fa75840f-7fe3-487d-93e6-7af78d580b00" (UID: "fa75840f-7fe3-487d-93e6-7af78d580b00"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.334139 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa75840f-7fe3-487d-93e6-7af78d580b00-kube-api-access-2qndf" (OuterVolumeSpecName: "kube-api-access-2qndf") pod "fa75840f-7fe3-487d-93e6-7af78d580b00" (UID: "fa75840f-7fe3-487d-93e6-7af78d580b00"). InnerVolumeSpecName "kube-api-access-2qndf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.418929 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.419088 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qndf\" (UniqueName: \"kubernetes.io/projected/fa75840f-7fe3-487d-93e6-7af78d580b00-kube-api-access-2qndf\") on node \"crc\" DevicePath \"\"" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.477732 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fa75840f-7fe3-487d-93e6-7af78d580b00" (UID: "fa75840f-7fe3-487d-93e6-7af78d580b00"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.520950 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fa75840f-7fe3-487d-93e6-7af78d580b00-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.680560 4906 generic.go:334] "Generic (PLEG): container finished" podID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerID="4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3" exitCode=0 Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.680618 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jkm4" event={"ID":"fa75840f-7fe3-487d-93e6-7af78d580b00","Type":"ContainerDied","Data":"4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3"} Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.680674 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-7jkm4" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.680703 4906 scope.go:117] "RemoveContainer" containerID="4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.680685 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-7jkm4" event={"ID":"fa75840f-7fe3-487d-93e6-7af78d580b00","Type":"ContainerDied","Data":"bf6c8b2c107af777b11a401d67d6f21dd8cf166690ad326681dedd14e1af6dc8"} Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.701035 4906 scope.go:117] "RemoveContainer" containerID="3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.726213 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-7jkm4"] Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.733687 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-7jkm4"] Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.745739 4906 scope.go:117] "RemoveContainer" containerID="91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.763943 4906 scope.go:117] "RemoveContainer" containerID="4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3" Feb 27 09:25:07 crc kubenswrapper[4906]: E0227 09:25:07.764343 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3\": container with ID starting with 4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3 not found: ID does not exist" containerID="4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.764374 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3"} err="failed to get container status \"4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3\": rpc error: code = NotFound desc = could not find container \"4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3\": container with ID starting with 4f7075b3d16c92fe3a5a374fa8c1bf2df1a77ec860c4d1a9e44ad1bef16e46e3 not found: ID does not exist" Feb 27 09:25:07 crc 
kubenswrapper[4906]: I0227 09:25:07.764395 4906 scope.go:117] "RemoveContainer" containerID="3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec" Feb 27 09:25:07 crc kubenswrapper[4906]: E0227 09:25:07.764799 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec\": container with ID starting with 3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec not found: ID does not exist" containerID="3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.764848 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec"} err="failed to get container status \"3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec\": rpc error: code = NotFound desc = could not find container \"3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec\": container with ID starting with 3e57fc2b339a795a952b6e1679568b69537ea2cde326d8b4d25b02c5bd3477ec not found: ID does not exist" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.764897 4906 scope.go:117] "RemoveContainer" containerID="91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612" Feb 27 09:25:07 crc kubenswrapper[4906]: E0227 09:25:07.765281 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612\": container with ID starting with 91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612 not found: ID does not exist" containerID="91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612" Feb 27 09:25:07 crc kubenswrapper[4906]: I0227 09:25:07.765308 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612"} err="failed to get container status \"91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612\": rpc error: code = NotFound desc = could not find container \"91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612\": container with ID starting with 91e39678664c89036e564bbc9a258f85c94ae30fa5bbecc5afdf76f608527612 not found: ID does not exist" Feb 27 09:25:08 crc kubenswrapper[4906]: I0227 09:25:08.552832 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:25:08 crc kubenswrapper[4906]: E0227 09:25:08.553536 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:25:08 crc kubenswrapper[4906]: I0227 09:25:08.566353 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" path="/var/lib/kubelet/pods/fa75840f-7fe3-487d-93e6-7af78d580b00/volumes" Feb 27 09:25:21 crc kubenswrapper[4906]: I0227 09:25:21.552715 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" 
Feb 27 09:25:21 crc kubenswrapper[4906]: E0227 09:25:21.553393 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:25:33 crc kubenswrapper[4906]: I0227 09:25:33.553207 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:25:33 crc kubenswrapper[4906]: E0227 09:25:33.555568 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:25:48 crc kubenswrapper[4906]: I0227 09:25:48.552431 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:25:48 crc kubenswrapper[4906]: E0227 09:25:48.553314 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.147149 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536406-p2x5h"] Feb 27 09:26:00 crc kubenswrapper[4906]: E0227 09:26:00.148220 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="extract-utilities" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.148239 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="extract-utilities" Feb 27 09:26:00 crc kubenswrapper[4906]: E0227 09:26:00.148277 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="extract-content" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.148286 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="extract-content" Feb 27 09:26:00 crc kubenswrapper[4906]: E0227 09:26:00.148317 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="registry-server" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.148327 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="registry-server" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.148525 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa75840f-7fe3-487d-93e6-7af78d580b00" containerName="registry-server" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.149378 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.151643 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.153649 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.155788 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.158621 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536406-p2x5h"] Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.340474 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfll2\" (UniqueName: \"kubernetes.io/projected/e474a330-5abe-41f3-96a2-4ffea13d4307-kube-api-access-rfll2\") pod \"auto-csr-approver-29536406-p2x5h\" (UID: \"e474a330-5abe-41f3-96a2-4ffea13d4307\") " pod="openshift-infra/auto-csr-approver-29536406-p2x5h" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.442428 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfll2\" (UniqueName: \"kubernetes.io/projected/e474a330-5abe-41f3-96a2-4ffea13d4307-kube-api-access-rfll2\") pod \"auto-csr-approver-29536406-p2x5h\" (UID: \"e474a330-5abe-41f3-96a2-4ffea13d4307\") " pod="openshift-infra/auto-csr-approver-29536406-p2x5h" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.460265 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfll2\" (UniqueName: \"kubernetes.io/projected/e474a330-5abe-41f3-96a2-4ffea13d4307-kube-api-access-rfll2\") pod \"auto-csr-approver-29536406-p2x5h\" (UID: \"e474a330-5abe-41f3-96a2-4ffea13d4307\") " pod="openshift-infra/auto-csr-approver-29536406-p2x5h" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.473444 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" Feb 27 09:26:00 crc kubenswrapper[4906]: I0227 09:26:00.965676 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536406-p2x5h"] Feb 27 09:26:01 crc kubenswrapper[4906]: I0227 09:26:01.178543 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" event={"ID":"e474a330-5abe-41f3-96a2-4ffea13d4307","Type":"ContainerStarted","Data":"78f6dee4753615136c712ca4fd74b5b38d0081082087c0f66e45392aa3529ada"} Feb 27 09:26:01 crc kubenswrapper[4906]: I0227 09:26:01.551654 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:26:01 crc kubenswrapper[4906]: E0227 09:26:01.552097 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:26:02 crc kubenswrapper[4906]: I0227 09:26:02.189237 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" event={"ID":"e474a330-5abe-41f3-96a2-4ffea13d4307","Type":"ContainerStarted","Data":"a14af22109a43df162ef110a106d49b7f7be1f84cff59c30b9c9b9f979eae7e2"} Feb 27 09:26:02 crc kubenswrapper[4906]: I0227 09:26:02.218793 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" podStartSLOduration=1.343373494 podStartE2EDuration="2.218770424s" podCreationTimestamp="2026-02-27 09:26:00 +0000 UTC" firstStartedPulling="2026-02-27 09:26:00.970846802 +0000 UTC m=+3459.365248422" lastFinishedPulling="2026-02-27 09:26:01.846243742 +0000 UTC m=+3460.240645352" observedRunningTime="2026-02-27 09:26:02.210432752 +0000 UTC m=+3460.604834372" watchObservedRunningTime="2026-02-27 09:26:02.218770424 +0000 UTC m=+3460.613172034" Feb 27 09:26:03 crc kubenswrapper[4906]: I0227 09:26:03.198484 4906 generic.go:334] "Generic (PLEG): container finished" podID="e474a330-5abe-41f3-96a2-4ffea13d4307" containerID="a14af22109a43df162ef110a106d49b7f7be1f84cff59c30b9c9b9f979eae7e2" exitCode=0 Feb 27 09:26:03 crc kubenswrapper[4906]: I0227 09:26:03.198531 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" event={"ID":"e474a330-5abe-41f3-96a2-4ffea13d4307","Type":"ContainerDied","Data":"a14af22109a43df162ef110a106d49b7f7be1f84cff59c30b9c9b9f979eae7e2"} Feb 27 09:26:04 crc kubenswrapper[4906]: I0227 09:26:04.576030 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" Feb 27 09:26:04 crc kubenswrapper[4906]: I0227 09:26:04.726451 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfll2\" (UniqueName: \"kubernetes.io/projected/e474a330-5abe-41f3-96a2-4ffea13d4307-kube-api-access-rfll2\") pod \"e474a330-5abe-41f3-96a2-4ffea13d4307\" (UID: \"e474a330-5abe-41f3-96a2-4ffea13d4307\") " Feb 27 09:26:04 crc kubenswrapper[4906]: I0227 09:26:04.732158 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e474a330-5abe-41f3-96a2-4ffea13d4307-kube-api-access-rfll2" (OuterVolumeSpecName: "kube-api-access-rfll2") pod "e474a330-5abe-41f3-96a2-4ffea13d4307" (UID: "e474a330-5abe-41f3-96a2-4ffea13d4307"). InnerVolumeSpecName "kube-api-access-rfll2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:26:04 crc kubenswrapper[4906]: I0227 09:26:04.828601 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfll2\" (UniqueName: \"kubernetes.io/projected/e474a330-5abe-41f3-96a2-4ffea13d4307-kube-api-access-rfll2\") on node \"crc\" DevicePath \"\"" Feb 27 09:26:05 crc kubenswrapper[4906]: I0227 09:26:05.218205 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" event={"ID":"e474a330-5abe-41f3-96a2-4ffea13d4307","Type":"ContainerDied","Data":"78f6dee4753615136c712ca4fd74b5b38d0081082087c0f66e45392aa3529ada"} Feb 27 09:26:05 crc kubenswrapper[4906]: I0227 09:26:05.218246 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="78f6dee4753615136c712ca4fd74b5b38d0081082087c0f66e45392aa3529ada" Feb 27 09:26:05 crc kubenswrapper[4906]: I0227 09:26:05.218316 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536406-p2x5h" Feb 27 09:26:05 crc kubenswrapper[4906]: I0227 09:26:05.270189 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536400-dphzm"] Feb 27 09:26:05 crc kubenswrapper[4906]: I0227 09:26:05.277690 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536400-dphzm"] Feb 27 09:26:06 crc kubenswrapper[4906]: I0227 09:26:06.562766 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f1e0390-8255-4e15-9615-64695bd30b6f" path="/var/lib/kubelet/pods/0f1e0390-8255-4e15-9615-64695bd30b6f/volumes" Feb 27 09:26:07 crc kubenswrapper[4906]: I0227 09:26:07.885193 4906 scope.go:117] "RemoveContainer" containerID="ac3f72a786acd3982115012720e9fae5635f7e1c0d83001794b57fcc7455eeed" Feb 27 09:26:14 crc kubenswrapper[4906]: I0227 09:26:14.552658 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:26:14 crc kubenswrapper[4906]: E0227 09:26:14.553816 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:26:27 crc kubenswrapper[4906]: I0227 09:26:27.552566 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:26:27 crc kubenswrapper[4906]: E0227 09:26:27.553297 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:26:39 crc kubenswrapper[4906]: I0227 09:26:39.558845 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:26:39 crc kubenswrapper[4906]: E0227 09:26:39.562525 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 09:26:54 crc kubenswrapper[4906]: I0227 09:26:54.551991 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:26:54 crc kubenswrapper[4906]: E0227 09:26:54.552702 4906 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2s5wg_openshift-machine-config-operator(fc2f1b1e-37c4-45c1-8f9c-221faf5b777d)\"" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" podUID="fc2f1b1e-37c4-45c1-8f9c-221faf5b777d" Feb 27 
09:27:05 crc kubenswrapper[4906]: I0227 09:27:05.552678 4906 scope.go:117] "RemoveContainer" containerID="a91dd7e1170c8c6152389016b4cb3807a07a0474d8d3a7c34a4269fcb73699d1" Feb 27 09:27:05 crc kubenswrapper[4906]: I0227 09:27:05.820142 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2s5wg" event={"ID":"fc2f1b1e-37c4-45c1-8f9c-221faf5b777d","Type":"ContainerStarted","Data":"b843f13ed8455ce81c21df652c5f2a5b4414bbc9b05eb600b80503bedad62198"} Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.101815 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-c6p27"] Feb 27 09:27:23 crc kubenswrapper[4906]: E0227 09:27:23.102766 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e474a330-5abe-41f3-96a2-4ffea13d4307" containerName="oc" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.102782 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="e474a330-5abe-41f3-96a2-4ffea13d4307" containerName="oc" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.103080 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="e474a330-5abe-41f3-96a2-4ffea13d4307" containerName="oc" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.104657 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.112482 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c6p27"] Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.190638 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-catalog-content\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.190982 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-utilities\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.191142 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzgmm\" (UniqueName: \"kubernetes.io/projected/2f9af97a-675d-4985-9c1d-f27b3a06857d-kube-api-access-kzgmm\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.292814 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-utilities\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.292903 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzgmm\" (UniqueName: \"kubernetes.io/projected/2f9af97a-675d-4985-9c1d-f27b3a06857d-kube-api-access-kzgmm\") pod \"community-operators-c6p27\" (UID: 
\"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.293032 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-catalog-content\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.293450 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-utilities\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.293759 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-catalog-content\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.311686 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzgmm\" (UniqueName: \"kubernetes.io/projected/2f9af97a-675d-4985-9c1d-f27b3a06857d-kube-api-access-kzgmm\") pod \"community-operators-c6p27\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.430220 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:23 crc kubenswrapper[4906]: W0227 09:27:23.979338 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f9af97a_675d_4985_9c1d_f27b3a06857d.slice/crio-053d8b0510361cddf9136e445764154e3eec016efb6e0a28e96c5db90124d796 WatchSource:0}: Error finding container 053d8b0510361cddf9136e445764154e3eec016efb6e0a28e96c5db90124d796: Status 404 returned error can't find the container with id 053d8b0510361cddf9136e445764154e3eec016efb6e0a28e96c5db90124d796 Feb 27 09:27:23 crc kubenswrapper[4906]: I0227 09:27:23.980205 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-c6p27"] Feb 27 09:27:24 crc kubenswrapper[4906]: I0227 09:27:24.007203 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6p27" event={"ID":"2f9af97a-675d-4985-9c1d-f27b3a06857d","Type":"ContainerStarted","Data":"053d8b0510361cddf9136e445764154e3eec016efb6e0a28e96c5db90124d796"} Feb 27 09:27:25 crc kubenswrapper[4906]: I0227 09:27:25.017184 4906 generic.go:334] "Generic (PLEG): container finished" podID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerID="3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e" exitCode=0 Feb 27 09:27:25 crc kubenswrapper[4906]: I0227 09:27:25.017237 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6p27" event={"ID":"2f9af97a-675d-4985-9c1d-f27b3a06857d","Type":"ContainerDied","Data":"3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e"} Feb 27 09:27:25 crc kubenswrapper[4906]: I0227 09:27:25.019439 4906 provider.go:102] 
Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 27 09:27:26 crc kubenswrapper[4906]: I0227 09:27:26.031227 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6p27" event={"ID":"2f9af97a-675d-4985-9c1d-f27b3a06857d","Type":"ContainerStarted","Data":"fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4"} Feb 27 09:27:28 crc kubenswrapper[4906]: I0227 09:27:28.062343 4906 generic.go:334] "Generic (PLEG): container finished" podID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerID="fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4" exitCode=0 Feb 27 09:27:28 crc kubenswrapper[4906]: I0227 09:27:28.062466 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6p27" event={"ID":"2f9af97a-675d-4985-9c1d-f27b3a06857d","Type":"ContainerDied","Data":"fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4"} Feb 27 09:27:29 crc kubenswrapper[4906]: I0227 09:27:29.076176 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6p27" event={"ID":"2f9af97a-675d-4985-9c1d-f27b3a06857d","Type":"ContainerStarted","Data":"e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec"} Feb 27 09:27:29 crc kubenswrapper[4906]: I0227 09:27:29.099756 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-c6p27" podStartSLOduration=2.692092268 podStartE2EDuration="6.099713114s" podCreationTimestamp="2026-02-27 09:27:23 +0000 UTC" firstStartedPulling="2026-02-27 09:27:25.019200835 +0000 UTC m=+3543.413602445" lastFinishedPulling="2026-02-27 09:27:28.426821671 +0000 UTC m=+3546.821223291" observedRunningTime="2026-02-27 09:27:29.094560836 +0000 UTC m=+3547.488962456" watchObservedRunningTime="2026-02-27 09:27:29.099713114 +0000 UTC m=+3547.494114724" Feb 27 09:27:33 crc kubenswrapper[4906]: I0227 09:27:33.431432 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:33 crc kubenswrapper[4906]: I0227 09:27:33.432075 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:33 crc kubenswrapper[4906]: I0227 09:27:33.498857 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:34 crc kubenswrapper[4906]: I0227 09:27:34.172312 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:34 crc kubenswrapper[4906]: I0227 09:27:34.226452 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c6p27"] Feb 27 09:27:36 crc kubenswrapper[4906]: I0227 09:27:36.137593 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-c6p27" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="registry-server" containerID="cri-o://e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec" gracePeriod=2 Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.111452 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.148867 4906 generic.go:334] "Generic (PLEG): container finished" podID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerID="e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec" exitCode=0 Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.148926 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6p27" event={"ID":"2f9af97a-675d-4985-9c1d-f27b3a06857d","Type":"ContainerDied","Data":"e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec"} Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.148966 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-c6p27" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.148996 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-c6p27" event={"ID":"2f9af97a-675d-4985-9c1d-f27b3a06857d","Type":"ContainerDied","Data":"053d8b0510361cddf9136e445764154e3eec016efb6e0a28e96c5db90124d796"} Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.149017 4906 scope.go:117] "RemoveContainer" containerID="e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.173728 4906 scope.go:117] "RemoveContainer" containerID="fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.197673 4906 scope.go:117] "RemoveContainer" containerID="3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.237568 4906 scope.go:117] "RemoveContainer" containerID="e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec" Feb 27 09:27:37 crc kubenswrapper[4906]: E0227 09:27:37.238438 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec\": container with ID starting with e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec not found: ID does not exist" containerID="e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.238496 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec"} err="failed to get container status \"e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec\": rpc error: code = NotFound desc = could not find container \"e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec\": container with ID starting with e4b6f3209e59c7733700e39bfffae0f490fefec7ea374aee80f97dfa1e4655ec not found: ID does not exist" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.238523 4906 scope.go:117] "RemoveContainer" containerID="fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4" Feb 27 09:27:37 crc kubenswrapper[4906]: E0227 09:27:37.238991 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4\": container with ID starting with fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4 not found: ID does not exist" 
containerID="fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.239032 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4"} err="failed to get container status \"fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4\": rpc error: code = NotFound desc = could not find container \"fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4\": container with ID starting with fd6484163dea9f880a2edbb70e4d509fe434f0a87ca50ea6b5c21f07c90d5be4 not found: ID does not exist" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.239059 4906 scope.go:117] "RemoveContainer" containerID="3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e" Feb 27 09:27:37 crc kubenswrapper[4906]: E0227 09:27:37.239401 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e\": container with ID starting with 3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e not found: ID does not exist" containerID="3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.239423 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e"} err="failed to get container status \"3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e\": rpc error: code = NotFound desc = could not find container \"3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e\": container with ID starting with 3201fb819c1c4e2e6ded4e248dcf73af43ce1b7651683661a38719a6de8e898e not found: ID does not exist" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.287063 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-catalog-content\") pod \"2f9af97a-675d-4985-9c1d-f27b3a06857d\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.287139 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kzgmm\" (UniqueName: \"kubernetes.io/projected/2f9af97a-675d-4985-9c1d-f27b3a06857d-kube-api-access-kzgmm\") pod \"2f9af97a-675d-4985-9c1d-f27b3a06857d\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.287338 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-utilities\") pod \"2f9af97a-675d-4985-9c1d-f27b3a06857d\" (UID: \"2f9af97a-675d-4985-9c1d-f27b3a06857d\") " Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.288388 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-utilities" (OuterVolumeSpecName: "utilities") pod "2f9af97a-675d-4985-9c1d-f27b3a06857d" (UID: "2f9af97a-675d-4985-9c1d-f27b3a06857d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.297453 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f9af97a-675d-4985-9c1d-f27b3a06857d-kube-api-access-kzgmm" (OuterVolumeSpecName: "kube-api-access-kzgmm") pod "2f9af97a-675d-4985-9c1d-f27b3a06857d" (UID: "2f9af97a-675d-4985-9c1d-f27b3a06857d"). InnerVolumeSpecName "kube-api-access-kzgmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.340093 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2f9af97a-675d-4985-9c1d-f27b3a06857d" (UID: "2f9af97a-675d-4985-9c1d-f27b3a06857d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.389855 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.389904 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2f9af97a-675d-4985-9c1d-f27b3a06857d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.389917 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kzgmm\" (UniqueName: \"kubernetes.io/projected/2f9af97a-675d-4985-9c1d-f27b3a06857d-kube-api-access-kzgmm\") on node \"crc\" DevicePath \"\"" Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.500190 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-c6p27"] Feb 27 09:27:37 crc kubenswrapper[4906]: I0227 09:27:37.522774 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-c6p27"] Feb 27 09:27:38 crc kubenswrapper[4906]: I0227 09:27:38.561653 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" path="/var/lib/kubelet/pods/2f9af97a-675d-4985-9c1d-f27b3a06857d/volumes" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.202799 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29536408-gwwhf"] Feb 27 09:28:00 crc kubenswrapper[4906]: E0227 09:28:00.204899 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="registry-server" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.204919 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="registry-server" Feb 27 09:28:00 crc kubenswrapper[4906]: E0227 09:28:00.204950 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="extract-content" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.204957 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="extract-content" Feb 27 09:28:00 crc kubenswrapper[4906]: E0227 09:28:00.204969 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="extract-utilities" Feb 27 
09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.204978 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="extract-utilities" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.205363 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f9af97a-675d-4985-9c1d-f27b3a06857d" containerName="registry-server" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.206350 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536408-gwwhf" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.208710 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.210074 4906 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.210966 4906 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-xlzfs" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.215002 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536408-gwwhf"] Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.345694 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rs8h\" (UniqueName: \"kubernetes.io/projected/c2cea4d2-8032-447b-bbc3-bb65b4fd6e87-kube-api-access-4rs8h\") pod \"auto-csr-approver-29536408-gwwhf\" (UID: \"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87\") " pod="openshift-infra/auto-csr-approver-29536408-gwwhf" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.447296 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rs8h\" (UniqueName: \"kubernetes.io/projected/c2cea4d2-8032-447b-bbc3-bb65b4fd6e87-kube-api-access-4rs8h\") pod \"auto-csr-approver-29536408-gwwhf\" (UID: \"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87\") " pod="openshift-infra/auto-csr-approver-29536408-gwwhf" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.465052 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rs8h\" (UniqueName: \"kubernetes.io/projected/c2cea4d2-8032-447b-bbc3-bb65b4fd6e87-kube-api-access-4rs8h\") pod \"auto-csr-approver-29536408-gwwhf\" (UID: \"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87\") " pod="openshift-infra/auto-csr-approver-29536408-gwwhf" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.538448 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536408-gwwhf" Feb 27 09:28:00 crc kubenswrapper[4906]: I0227 09:28:00.988424 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29536408-gwwhf"] Feb 27 09:28:01 crc kubenswrapper[4906]: W0227 09:28:01.002074 4906 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2cea4d2_8032_447b_bbc3_bb65b4fd6e87.slice/crio-4174e47bae35c75ce53ef459ee5fad656024d8eb34fde19a282888b98c5ae1c5 WatchSource:0}: Error finding container 4174e47bae35c75ce53ef459ee5fad656024d8eb34fde19a282888b98c5ae1c5: Status 404 returned error can't find the container with id 4174e47bae35c75ce53ef459ee5fad656024d8eb34fde19a282888b98c5ae1c5 Feb 27 09:28:01 crc kubenswrapper[4906]: I0227 09:28:01.382756 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536408-gwwhf" event={"ID":"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87","Type":"ContainerStarted","Data":"4174e47bae35c75ce53ef459ee5fad656024d8eb34fde19a282888b98c5ae1c5"} Feb 27 09:28:03 crc kubenswrapper[4906]: I0227 09:28:03.410436 4906 generic.go:334] "Generic (PLEG): container finished" podID="c2cea4d2-8032-447b-bbc3-bb65b4fd6e87" containerID="faad223da6a29b50218d8bacf198805155a41d628f76194dd5be9ff74cdaae07" exitCode=0 Feb 27 09:28:03 crc kubenswrapper[4906]: I0227 09:28:03.410476 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536408-gwwhf" event={"ID":"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87","Type":"ContainerDied","Data":"faad223da6a29b50218d8bacf198805155a41d628f76194dd5be9ff74cdaae07"} Feb 27 09:28:04 crc kubenswrapper[4906]: I0227 09:28:04.808101 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29536408-gwwhf" Feb 27 09:28:04 crc kubenswrapper[4906]: I0227 09:28:04.937638 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4rs8h\" (UniqueName: \"kubernetes.io/projected/c2cea4d2-8032-447b-bbc3-bb65b4fd6e87-kube-api-access-4rs8h\") pod \"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87\" (UID: \"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87\") " Feb 27 09:28:04 crc kubenswrapper[4906]: I0227 09:28:04.953204 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2cea4d2-8032-447b-bbc3-bb65b4fd6e87-kube-api-access-4rs8h" (OuterVolumeSpecName: "kube-api-access-4rs8h") pod "c2cea4d2-8032-447b-bbc3-bb65b4fd6e87" (UID: "c2cea4d2-8032-447b-bbc3-bb65b4fd6e87"). InnerVolumeSpecName "kube-api-access-4rs8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:28:05 crc kubenswrapper[4906]: I0227 09:28:05.040600 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4rs8h\" (UniqueName: \"kubernetes.io/projected/c2cea4d2-8032-447b-bbc3-bb65b4fd6e87-kube-api-access-4rs8h\") on node \"crc\" DevicePath \"\"" Feb 27 09:28:05 crc kubenswrapper[4906]: I0227 09:28:05.436677 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29536408-gwwhf" event={"ID":"c2cea4d2-8032-447b-bbc3-bb65b4fd6e87","Type":"ContainerDied","Data":"4174e47bae35c75ce53ef459ee5fad656024d8eb34fde19a282888b98c5ae1c5"} Feb 27 09:28:05 crc kubenswrapper[4906]: I0227 09:28:05.436745 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29536408-gwwhf" Feb 27 09:28:05 crc kubenswrapper[4906]: I0227 09:28:05.436761 4906 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4174e47bae35c75ce53ef459ee5fad656024d8eb34fde19a282888b98c5ae1c5" Feb 27 09:28:05 crc kubenswrapper[4906]: I0227 09:28:05.907164 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29536402-62zsc"] Feb 27 09:28:05 crc kubenswrapper[4906]: I0227 09:28:05.925417 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29536402-62zsc"] Feb 27 09:28:06 crc kubenswrapper[4906]: I0227 09:28:06.563944 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7fc749ea-eaa6-4163-90e5-c4898efb6235" path="/var/lib/kubelet/pods/7fc749ea-eaa6-4163-90e5-c4898efb6235/volumes" Feb 27 09:28:07 crc kubenswrapper[4906]: I0227 09:28:07.995471 4906 scope.go:117] "RemoveContainer" containerID="381b73eea4b338439d951c5ac8cf43ff78ebecfdd10885a09d22310b802f9e77" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.127340 4906 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7w29g"] Feb 27 09:28:36 crc kubenswrapper[4906]: E0227 09:28:36.130830 4906 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2cea4d2-8032-447b-bbc3-bb65b4fd6e87" containerName="oc" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.131001 4906 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2cea4d2-8032-447b-bbc3-bb65b4fd6e87" containerName="oc" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.131427 4906 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2cea4d2-8032-447b-bbc3-bb65b4fd6e87" containerName="oc" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.134279 4906 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.167782 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w29g"] Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.318339 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-catalog-content\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.318609 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-utilities\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.318796 4906 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cdjh8\" (UniqueName: \"kubernetes.io/projected/21b9636b-95c3-457b-9c6e-8103b64fe313-kube-api-access-cdjh8\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.420054 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-catalog-content\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.420142 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-utilities\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.420282 4906 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cdjh8\" (UniqueName: \"kubernetes.io/projected/21b9636b-95c3-457b-9c6e-8103b64fe313-kube-api-access-cdjh8\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.420749 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-utilities\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.421353 4906 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-catalog-content\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.511011 4906 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-cdjh8\" (UniqueName: \"kubernetes.io/projected/21b9636b-95c3-457b-9c6e-8103b64fe313-kube-api-access-cdjh8\") pod \"redhat-marketplace-7w29g\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:36 crc kubenswrapper[4906]: I0227 09:28:36.756540 4906 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:37 crc kubenswrapper[4906]: I0227 09:28:37.214345 4906 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w29g"] Feb 27 09:28:37 crc kubenswrapper[4906]: I0227 09:28:37.847013 4906 generic.go:334] "Generic (PLEG): container finished" podID="21b9636b-95c3-457b-9c6e-8103b64fe313" containerID="dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622" exitCode=0 Feb 27 09:28:37 crc kubenswrapper[4906]: I0227 09:28:37.847067 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w29g" event={"ID":"21b9636b-95c3-457b-9c6e-8103b64fe313","Type":"ContainerDied","Data":"dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622"} Feb 27 09:28:37 crc kubenswrapper[4906]: I0227 09:28:37.847103 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w29g" event={"ID":"21b9636b-95c3-457b-9c6e-8103b64fe313","Type":"ContainerStarted","Data":"6fe5ef799bcd0b100c58837fbc395eda69026ca2826625b4c429a70f5e6ee938"} Feb 27 09:28:38 crc kubenswrapper[4906]: I0227 09:28:38.858538 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w29g" event={"ID":"21b9636b-95c3-457b-9c6e-8103b64fe313","Type":"ContainerStarted","Data":"a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb"} Feb 27 09:28:39 crc kubenswrapper[4906]: I0227 09:28:39.876661 4906 generic.go:334] "Generic (PLEG): container finished" podID="21b9636b-95c3-457b-9c6e-8103b64fe313" containerID="a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb" exitCode=0 Feb 27 09:28:39 crc kubenswrapper[4906]: I0227 09:28:39.876803 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w29g" event={"ID":"21b9636b-95c3-457b-9c6e-8103b64fe313","Type":"ContainerDied","Data":"a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb"} Feb 27 09:28:40 crc kubenswrapper[4906]: I0227 09:28:40.890451 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w29g" event={"ID":"21b9636b-95c3-457b-9c6e-8103b64fe313","Type":"ContainerStarted","Data":"048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c"} Feb 27 09:28:40 crc kubenswrapper[4906]: I0227 09:28:40.921767 4906 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7w29g" podStartSLOduration=2.478039641 podStartE2EDuration="4.92173921s" podCreationTimestamp="2026-02-27 09:28:36 +0000 UTC" firstStartedPulling="2026-02-27 09:28:37.849438601 +0000 UTC m=+3616.243840231" lastFinishedPulling="2026-02-27 09:28:40.29313819 +0000 UTC m=+3618.687539800" observedRunningTime="2026-02-27 09:28:40.913164251 +0000 UTC m=+3619.307565871" watchObservedRunningTime="2026-02-27 09:28:40.92173921 +0000 UTC m=+3619.316140860" Feb 27 09:28:46 crc kubenswrapper[4906]: I0227 09:28:46.757614 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:46 crc kubenswrapper[4906]: I0227 09:28:46.758655 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:46 crc kubenswrapper[4906]: I0227 09:28:46.828496 4906 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:46 crc kubenswrapper[4906]: I0227 09:28:46.990541 4906 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:47 crc kubenswrapper[4906]: I0227 09:28:47.065288 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w29g"] Feb 27 09:28:48 crc kubenswrapper[4906]: I0227 09:28:48.970218 4906 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7w29g" podUID="21b9636b-95c3-457b-9c6e-8103b64fe313" containerName="registry-server" containerID="cri-o://048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c" gracePeriod=2 Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.410548 4906 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.499185 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cdjh8\" (UniqueName: \"kubernetes.io/projected/21b9636b-95c3-457b-9c6e-8103b64fe313-kube-api-access-cdjh8\") pod \"21b9636b-95c3-457b-9c6e-8103b64fe313\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.499826 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-utilities\") pod \"21b9636b-95c3-457b-9c6e-8103b64fe313\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.499978 4906 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-catalog-content\") pod \"21b9636b-95c3-457b-9c6e-8103b64fe313\" (UID: \"21b9636b-95c3-457b-9c6e-8103b64fe313\") " Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.500903 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-utilities" (OuterVolumeSpecName: "utilities") pod "21b9636b-95c3-457b-9c6e-8103b64fe313" (UID: "21b9636b-95c3-457b-9c6e-8103b64fe313"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.506566 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21b9636b-95c3-457b-9c6e-8103b64fe313-kube-api-access-cdjh8" (OuterVolumeSpecName: "kube-api-access-cdjh8") pod "21b9636b-95c3-457b-9c6e-8103b64fe313" (UID: "21b9636b-95c3-457b-9c6e-8103b64fe313"). InnerVolumeSpecName "kube-api-access-cdjh8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.586928 4906 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "21b9636b-95c3-457b-9c6e-8103b64fe313" (UID: "21b9636b-95c3-457b-9c6e-8103b64fe313"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.602179 4906 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-utilities\") on node \"crc\" DevicePath \"\"" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.602234 4906 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/21b9636b-95c3-457b-9c6e-8103b64fe313-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.602249 4906 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cdjh8\" (UniqueName: \"kubernetes.io/projected/21b9636b-95c3-457b-9c6e-8103b64fe313-kube-api-access-cdjh8\") on node \"crc\" DevicePath \"\"" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.984176 4906 generic.go:334] "Generic (PLEG): container finished" podID="21b9636b-95c3-457b-9c6e-8103b64fe313" containerID="048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c" exitCode=0 Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.984220 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w29g" event={"ID":"21b9636b-95c3-457b-9c6e-8103b64fe313","Type":"ContainerDied","Data":"048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c"} Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.984246 4906 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w29g" event={"ID":"21b9636b-95c3-457b-9c6e-8103b64fe313","Type":"ContainerDied","Data":"6fe5ef799bcd0b100c58837fbc395eda69026ca2826625b4c429a70f5e6ee938"} Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.984263 4906 scope.go:117] "RemoveContainer" containerID="048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c" Feb 27 09:28:49 crc kubenswrapper[4906]: I0227 09:28:49.984329 4906 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w29g" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.004818 4906 scope.go:117] "RemoveContainer" containerID="a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.041897 4906 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w29g"] Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.044542 4906 scope.go:117] "RemoveContainer" containerID="dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.055277 4906 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w29g"] Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.081597 4906 scope.go:117] "RemoveContainer" containerID="048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c" Feb 27 09:28:50 crc kubenswrapper[4906]: E0227 09:28:50.082272 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c\": container with ID starting with 048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c not found: ID does not exist" containerID="048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.082323 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c"} err="failed to get container status \"048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c\": rpc error: code = NotFound desc = could not find container \"048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c\": container with ID starting with 048348ae1c24cb0c685ca45485ebcee2405b9fef0b4d9778c25330b4308e3c3c not found: ID does not exist" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.082358 4906 scope.go:117] "RemoveContainer" containerID="a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb" Feb 27 09:28:50 crc kubenswrapper[4906]: E0227 09:28:50.082899 4906 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb\": container with ID starting with a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb not found: ID does not exist" containerID="a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.082950 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb"} err="failed to get container status \"a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb\": rpc error: code = NotFound desc = could not find container \"a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb\": container with ID starting with a49ed9ae0003bf61b49172c6df074e26847f2f3b7e2bab1f2ad82ac888c0a8eb not found: ID does not exist" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.082983 4906 scope.go:117] "RemoveContainer" containerID="dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622" Feb 27 09:28:50 crc kubenswrapper[4906]: E0227 09:28:50.083331 4906 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622\": container with ID starting with dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622 not found: ID does not exist" containerID="dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.083356 4906 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622"} err="failed to get container status \"dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622\": rpc error: code = NotFound desc = could not find container \"dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622\": container with ID starting with dbd047b7aad1b92c06fe33586a7f83df24f47effa4a3a003afcaaa26ff2c7622 not found: ID does not exist" Feb 27 09:28:50 crc kubenswrapper[4906]: I0227 09:28:50.573121 4906 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21b9636b-95c3-457b-9c6e-8103b64fe313" path="/var/lib/kubelet/pods/21b9636b-95c3-457b-9c6e-8103b64fe313/volumes" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515150261751024450 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015150261751017365 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015150252332016503 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015150252332015453 5ustar corecore